Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-03-31 21:27:28 +00:00)

Compare commits: 4.3.5 ... PRWLR-5266 (592 commits)
.backportrc.json (new file, +14)
@@ -0,0 +1,14 @@
{
  "repoOwner": "prowler-cloud",
  "repoName": "prowler",
  "targetPRLabels": [
    "backport"
  ],
  "sourcePRLabels": [
    "was-backported"
  ],
  "copySourcePRLabels": false,
  "copySourcePRReviewers": true,
  "prTitle": "{{sourcePullRequest.title}}",
  "commitConflicts": true
}
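This configuration also drives the standalone `backport` CLI from sorenlouv, which the new workflow below wraps. A minimal sketch of invoking it by hand, assuming Node.js and a token with push access; the PR number and target branch here are hypothetical:

```bash
# Run from a checkout of prowler; the tool reads .backportrc.json
# for repoOwner/repoName and the label settings above.
# --pr selects the merged PR; --branch picks the backport target.
npx backport --pr 1234 --branch v4.5
```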
.github/dependabot.yml (4 lines changed)
@@ -20,6 +20,9 @@ updates:
       interval: "daily"
     open-pull-requests-limit: 10
     target-branch: master
+    labels:
+      - "dependencies"
+      - "github_actions"

   - package-ecosystem: "pip"
     directory: "/"
@@ -38,5 +41,6 @@ updates:
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
      - "dependencies"
      - "github_actions"
+     - "v3"
.github/pull_request_template.md (1 line changed)
@@ -14,6 +14,7 @@ Please include a summary of the change and which issue is fixed. List any depend
   - If so, do we need to update permissions for the provider? Please review this carefully.
 - [ ] Review if the code is being covered by tests.
 - [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
+- [ ] Review if backport is needed.

 ### License

.github/workflows/backport.yml (new file, +42)
@@ -0,0 +1,42 @@
name: Automatic Backport

on:
  pull_request_target:
    branches: ['master']
    types: ['labeled', 'closed']

jobs:
  backport:
    name: Backport PR
    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      pull-requests: write
      contents: write
    steps:
      # Workaround not to fail the workflow if the PR does not need a backport
      # https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
      - name: Check for backport labels
        id: check_labels
        run: |-
          labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
          echo "$labels"
          matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
          echo "matched=$matched"
          echo "matched=$matched" >> $GITHUB_OUTPUT

      - name: Backport Action
        if: fromJSON(steps.check_labels.outputs.matched) > 0
        uses: sorenlouv/backport-github-action@v9.5.1
        with:
          github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          auto_backport_label_prefix: backport-to-

      - name: Info log
        if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
        run: cat ~/.backport/backport.info.log

      - name: Debug log
        if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
        run: cat ~/.backport/backport.debug.log
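The label-check step above can be exercised locally before touching the workflow; a small sketch with a made-up label list:

```bash
# Count labels starting with the auto-backport prefix, mirroring the
# workflow's jq filter; requires jq. These label values are examples.
labels='["severity/low","backport-to-v4.3","provider/aws"]'
matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
echo "matched=$matched"   # -> matched=1
```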
@@ -16,9 +16,9 @@ jobs:
     name: Documentation Link
     runs-on: ubuntu-latest
     steps:
-      - name: Leave PR comment with the SaaS Documentation URI
+      - name: Leave PR comment with the Prowler Documentation URI
        uses: peter-evans/create-or-update-comment@v4
        with:
          issue-number: ${{ env.PR_NUMBER }}
          body: |
-            You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
+            You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)

.github/workflows/build-lint-push-containers.yml (17 lines changed)
@@ -43,7 +43,7 @@ jobs:
     runs-on: ubuntu-latest
     outputs:
       prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
-      prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
+      prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
     env:
       POETRY_VIRTUALENVS_CREATE: "false"

@@ -65,6 +65,8 @@ jobs:
         id: get-prowler-version
         run: |
           PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
+          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
+          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

           # Store prowler version major just for the release
           PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -89,15 +91,6 @@ jobs:
           ;;
         esac

-      - name: Update Prowler version (release)
-        id: update-prowler-version
-        if: github.event_name == 'release'
-        run: |
-          PROWLER_VERSION="${{ github.event.release.tag_name }}"
-          poetry version "${PROWLER_VERSION}"
-          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
-          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
-
       - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
@@ -160,7 +153,7 @@ jobs:
         run: |
           curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
           -H "Accept: application/vnd.github+json" \
-          -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
+          -H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
           -H "X-GitHub-Api-Version: 2022-11-28" \
           --data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'

@@ -169,6 +162,6 @@ jobs:
         run: |
           curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
           -H "Accept: application/vnd.github+json" \
-          -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
+          -H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
          -H "X-GitHub-Api-Version: 2022-11-28" \
          --data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'

.github/workflows/codeql.yml (4 lines changed)
@@ -13,10 +13,10 @@ name: "CodeQL"

 on:
   push:
-    branches: [ "master", "v3" ]
+    branches: [ "master", "v3", "v4.*" ]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ "master", "v3" ]
+    branches: [ "master", "v3", "v4.*" ]
   schedule:
     - cron: '00 12 * * *'

.github/workflows/find-secrets.yml (2 lines changed)
@@ -11,7 +11,7 @@ jobs:
         with:
           fetch-depth: 0
       - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@3.80.4
+        uses: trufflesecurity/trufflehog@v3.83.4
        with:
          path: ./
          base: ${{ github.event.repository.default_branch }}

.github/workflows/labeler.yml (1 line changed)
@@ -5,6 +5,7 @@ on:
     branches:
       - "master"
       - "v3"
+      - "v4.*"

 jobs:
   labeler:

.github/workflows/pull-request.yml (5 lines changed)
@@ -5,10 +5,12 @@ on:
     branches:
       - "master"
       - "v3"
+      - "v4.*"
   pull_request:
     branches:
       - "master"
       - "v3"
+      - "v4.*"
 jobs:
   build:
     runs-on: ubuntu-latest
@@ -20,7 +22,7 @@ jobs:
       - uses: actions/checkout@v4
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v45
        with:
          files: ./**
          files_ignore: |
@@ -29,6 +31,7 @@ jobs:
             docs/**
             permissions/**
             mkdocs.yml
+            .backportrc.json
       - name: Install poetry
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |

.github/workflows/pypi-release.yml (31 lines changed)
@@ -8,8 +8,6 @@ env:
   RELEASE_TAG: ${{ github.event.release.tag_name }}
   PYTHON_VERSION: 3.11
   CACHE: "poetry"
-  # TODO: create a bot user for this kind of tasks, like prowler-bot
-  GIT_COMMITTER_EMAIL: "sergio@prowler.com"

 jobs:
   release-prowler-job:
@@ -40,7 +38,6 @@ jobs:
       - name: Install dependencies
         run: |
           pipx install poetry
-          pipx inject poetry poetry-bumpversion

       - name: Setup Python
         uses: actions/setup-python@v5
@@ -48,34 +45,6 @@ jobs:
           python-version: ${{ env.PYTHON_VERSION }}
           cache: ${{ env.CACHE }}

-      - name: Update Poetry and config version
-        run: |
-          poetry version ${{ env.RELEASE_TAG }}
-
-      - name: Import GPG key
-        uses: crazy-max/ghaction-import-gpg@v6
-        with:
-          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
-          passphrase: ${{ secrets.GPG_PASSPHRASE }}
-          git_user_signingkey: true
-          git_commit_gpgsign: true
-
-      - name: Push updated version to the release tag
-        run: |
-          # Configure Git
-          git config user.name "github-actions"
-          git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
-
-          # Add the files with the version changed
-          git add prowler/config/config.py pyproject.toml
-          git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
-
-          # Replace the tag with the version updated
-          git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
-
-          # Push the tag
-          git push -f origin ${{ env.RELEASE_TAG }}
-
       - name: Build Prowler package
         run: |
           poetry build

@@ -50,13 +50,13 @@ jobs:

       # Create pull request
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v7
         with:
-          token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
-          commit-message: "feat(regions_update): Update regions for AWS services."
+          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
+          commit-message: "feat(regions_update): Update regions for AWS services"
           branch: "aws-services-regions-updated-${{ github.sha }}"
-          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
-          title: "chore(regions_update): Changes in regions for AWS services."
+          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
+          title: "chore(regions_update): Changes in regions for AWS services"
          body: |
            ### Description

@@ -85,7 +85,7 @@ repos:
       # For running trufflehog in docker, use the following entry instead:
       # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
       language: system
-      stages: ["commit", "push"]
+      stages: ["pre-commit", "pre-push"]

     - id: bandit
       name: bandit

Dockerfile (13 lines changed)
@@ -2,29 +2,28 @@ FROM python:3.12-alpine

 LABEL maintainer="https://github.com/prowler-cloud/prowler"

-# Update system dependencies
+# Update system dependencies and install essential tools
 #hadolint ignore=DL3018
-RUN apk --no-cache upgrade && apk --no-cache add curl
+RUN apk --no-cache upgrade && apk --no-cache add curl git g++

-# Create nonroot user
+# Create non-root user
 RUN mkdir -p /home/prowler && \
     echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
     echo 'prowler:x:1000:' > /etc/group && \
     chown -R prowler:prowler /home/prowler
 USER prowler

-# Copy necessary files
+# Copy necessary files
 WORKDIR /home/prowler
 COPY prowler/ /home/prowler/prowler/
+COPY dashboard/ /home/prowler/dashboard/
 COPY pyproject.toml /home/prowler
 COPY README.md /home/prowler

-# Install dependencies
+# Install Python dependencies
 ENV HOME='/home/prowler'
 ENV PATH="$HOME/.local/bin:$PATH"
 #hadolint ignore=DL3013
-RUN pip install --no-cache-dir --upgrade pip && \
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
     pip install --no-cache-dir .

 # Remove deprecated dash dependencies

@@ -12,7 +12,7 @@
 <p align="center">
 <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
 <br>
-<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
+<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-2oinmgmw6-cl7gOrljSEqo_aoripVPFA">Join our Prowler community!</a>
 </p>
 <hr>
 <p align="center">
@@ -63,9 +63,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe

 | Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
 |---|---|---|---|---|
-| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
-| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
-| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
+| AWS | 553 | 77 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
+| GCP | 77 | 13 -> `prowler gcp --list-services` | 2 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
+| Azure | 138 | 17 -> `prowler azure --list-services` | 3 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
 | Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

 # 💻 Installation

@@ -12,7 +12,11 @@ Originally based on [org-multi-account](https://github.com/prowler-cloud/prowler

 ## Architecture Explanation

-The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
+The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
+
+## Prerequisites
+
+This solution assumes that you have a VPC architecture with two redundant subnets that can reach the AWS API endpoints (e.g. PrivateLink, NAT Gateway, etc.).

 ## CloudFormation Templates

@@ -59,9 +63,9 @@ The logs that are generated and sent to Cloudwatch are error logs, and assessmen

 ## Instructions
 1. Create a Private ECR Repository in the account that will host the Prowler container. The Audit account is recommended, but any account can be used.
-2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
-3. Follow the steps from "View Push Commands" to build and upload the container image. You need to have Docker and AWS CLI installed, and use the cli to login to the account first. After upload note the Image URI, as it is required for the CF-Prowler-ECS template.
-4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
+2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
+3. Follow the steps from "View Push Commands" to build and upload the container image. Substitute step 2 with the build command provided in the Dockerfile. You need to have Docker and AWS CLI installed, and use the cli to login to the account first. After upload note the Image URI, as it is required for the CF-Prowler-ECS template. Ensure that you pay attention to the architecture while performing the docker build command. A common mistake is not specifying the architecture and then building on Apple silicon. Your task will fail with *exec /home/prowler/.local/bin/prowler: exec format error*.
+4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
 5. Deploy **CF-Prowler-CrossAccountRole.yml** in the Master Account as a single stack. You will have to choose the CrossAccountRole name (ProwlerXA-Role by default) and the ProwlerTaskRoleName (ProwlerECSTask-Role by default)
 6. Deploy **CF-Prowler-CrossAccountRole.yml** in every Member Account as a StackSet. Choose the same CrossAccountName and ProwlerTaskRoleName as the previous step.
 7. Deploy **CF-Prowler-IAM.yml** in the account that will host the Prowler container (the same from step 1). The following template parameters must be provided:
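The architecture warning in step 3 can be sidestepped by pinning the target platform at build time; a minimal sketch, assuming Docker Buildx is available (the account ID, region, and repository name below are placeholders):

```bash
# Fargate (x86_64) needs an amd64 image even when building on Apple silicon.
docker buildx build --platform linux/amd64 -t prowler:latest .
docker tag prowler:latest 123456789012.dkr.ecr.us-east-1.amazonaws.com/prowler:latest
docker push 123456789012.dkr.ecr.us-east-1.amazonaws.com/prowler:latest
```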
@@ -91,4 +95,4 @@ If you permission find errors in the CloudWatch logs, the culprit might be a [Se
 ## Upgrading Prowler

 Prowler version is controlled by the PROWLERVER argument in the Dockerfile, change it to the desired version and follow the ECR Push Commands to update the container image.
-Old images can be deleted from the ECR Repository after the new image is confirmed to work. They will show as "untagged" as only one image can hold the "latest" tag.
+Old images can be deleted from the ECR Repository after the new image is confirmed to work. They will show as "untagged" as only one image can hold the "latest" tag.

@@ -68,7 +68,7 @@ for accountId in ${ACCOUNTS_IN_ORGS}; do
   # Run Prowler
   echo -e "Assessing AWS Account: ${accountId}, using Role: ${ROLE} on $(date)"
   # Pipe stdout to /dev/null to reduce unnecessary Cloudwatch logs
-  prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" -q -S -f "${REGION}" > /dev/null
+  prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" --security-hub --send-sh-only-fails -f "${REGION}" > /dev/null
   TOTAL_SEC=$((SECONDS - START_TIME))
   printf "Completed AWS Account: ${accountId} in %02dh:%02dm:%02ds" $((TOTAL_SEC / 3600)) $((TOTAL_SEC % 3600 / 60)) $((TOTAL_SEC % 60))
   echo ""

@@ -60,24 +60,42 @@ Resources:
               Effect: Allow
               Resource: "*"
               Action:
-                - ds:ListAuthorizedApplications
+                - account:Get*
+                - appstream:Describe*
+                - appstream:List*
+                - backup:List*
+                - cloudtrail:GetInsightSelectors
+                - codeartifact:List*
+                - codebuild:BatchGet*
+                - cognito-idp:GetUserPoolMfaConfig
+                - dlm:Get*
+                - drs:Describe*
+                - ds:Describe*
+                - ds:Get*
+                - ds:List*
+                - dynamodb:GetResourcePolicy
                 - ec2:GetEbsEncryptionByDefault
+                - ec2:GetSnapshotBlockPublicAccessState
+                - ec2:GetInstanceMetadataDefaults
                 - ecr:Describe*
+                - ecr:GetRegistryScanningConfiguration
                 - elasticfilesystem:DescribeBackupPolicy
                 - glue:GetConnections
-                - glue:GetSecurityConfiguration
+                - glue:GetSecurityConfiguration*
                 - glue:SearchTables
-                - lambda:GetFunction
+                - lambda:GetFunction*
                 - logs:FilterLogEvents
+                - lightsail:GetRelationalDatabases
                 - macie2:GetMacieSession
                 - s3:GetAccountPublicAccessBlock
                 - shield:DescribeProtection
                 - shield:GetSubscriptionState
                 - ssm:GetDocument
+                - ssm-incidents:List*
                 - support:Describe*
                 - tag:GetTagKeys
-        - PolicyName: Prowler-Security-Hub
-          PolicyDocument:
-            Version: 2012-10-17
-            Statement:
+                - wellarchitected:List*
+
+            - Sid: AllowProwlerSecurityHub
+              Effect: Allow
+              Resource: "*"

@@ -62,7 +62,7 @@ Resources:
           awslogs-stream-prefix: ecs
       Cpu: 1024
       ExecutionRoleArn: !Ref ECSExecutionRole
-      Memory: 2048
+      Memory: 8192
       NetworkMode: awsvpc
       TaskRoleArn: !Ref ProwlerTaskRole
       Family: SecurityHubProwlerTask

@@ -97,9 +97,15 @@ Outputs:
   ECSExecutionRoleARN:
     Description: ARN of the ECS Task Execution Role
     Value: !GetAtt ECSExecutionRole.Arn
+    Export:
+      Name: ECSExecutionRoleArn
   ProwlerTaskRoleARN:
     Description: ARN of the ECS Prowler Task Role
     Value: !GetAtt ProwlerTaskRole.Arn
+    Export:
+      Name: ProwlerTaskRoleArn
   ECSEventRoleARN:
     Description: ARN of the Eventbridge Task Role
     Value: !GetAtt ECSEventRole.Arn
+    Export:
+      Name: ECSEventRoleARN

dashboard/common_methods.py (229 lines added)
@@ -2223,3 +2223,232 @@ def get_section_containers_ens(data, section_1, section_2, section_3, section_4)
        section_containers.append(section_container)

    return html.Div(section_containers, className="compliance-data-layout")


# This function extracts and compares up to two numeric values, ensuring correct sorting for version-like strings.
def extract_numeric_values(value):
    numbers = re.findall(r"\d+", str(value))
    if len(numbers) >= 2:
        return int(numbers[0]), int(numbers[1])
    elif len(numbers) == 1:
        return int(numbers[0]), 0
    return 0, 0


def get_section_containers_kisa_ismsp(data, section_1, section_2):
    data["STATUS"] = data["STATUS"].apply(map_status_to_icon)
    data[section_1] = data[section_1].astype(str)
    data[section_2] = data[section_2].astype(str)
    data.sort_values(
        by=section_1,
        key=lambda x: x.map(extract_numeric_values),
        ascending=True,
        inplace=True,
    )

    findings_counts_section = (
        data.groupby([section_2, "STATUS"]).size().unstack(fill_value=0)
    )
    findings_counts_name = (
        data.groupby([section_1, "STATUS"]).size().unstack(fill_value=0)
    )

    section_containers = []

    for name in data[section_1].unique():
        success_name = (
            findings_counts_name.loc[name, pass_emoji]
            if pass_emoji in findings_counts_name.columns
            else 0
        )
        failed_name = (
            findings_counts_name.loc[name, fail_emoji]
            if fail_emoji in findings_counts_name.columns
            else 0
        )

        fig_name = go.Figure(
            data=[
                go.Bar(
                    name="Failed",
                    x=[failed_name],
                    y=[""],
                    orientation="h",
                    marker=dict(color="#e77676"),
                    width=[0.8],
                ),
                go.Bar(
                    name="Success",
                    x=[success_name],
                    y=[""],
                    orientation="h",
                    marker=dict(color="#45cc6e"),
                    width=[0.8],
                ),
            ]
        )

        fig_name.update_layout(
            barmode="stack",
            margin=dict(l=10, r=10, t=10, b=10),
            paper_bgcolor="rgba(0,0,0,0)",
            plot_bgcolor="rgba(0,0,0,0)",
            showlegend=False,
            width=350,
            height=30,
            xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
            yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
            annotations=[
                dict(
                    x=success_name + failed_name,
                    y=0,
                    xref="x",
                    yref="y",
                    text=str(success_name),
                    showarrow=False,
                    font=dict(color="#45cc6e", size=14),
                    xanchor="left",
                    yanchor="middle",
                ),
                dict(
                    x=0,
                    y=0,
                    xref="x",
                    yref="y",
                    text=str(failed_name),
                    showarrow=False,
                    font=dict(color="#e77676", size=14),
                    xanchor="right",
                    yanchor="middle",
                ),
            ],
        )

        graph_name = dcc.Graph(
            figure=fig_name, config={"staticPlot": True}, className="info-bar"
        )

        graph_div = html.Div(graph_name, className="graph-section")

        direct_internal_items = []

        for section in data[data[section_1] == name][section_2].unique():
            specific_data = data[
                (data[section_1] == name) & (data[section_2] == section)
            ]
            success_section = (
                findings_counts_section.loc[section, pass_emoji]
                if pass_emoji in findings_counts_section.columns
                else 0
            )
            failed_section = (
                findings_counts_section.loc[section, fail_emoji]
                if fail_emoji in findings_counts_section.columns
                else 0
            )

            data_table = dash_table.DataTable(
                data=specific_data.to_dict("records"),
                columns=[
                    {"name": i, "id": i}
                    for i in ["CHECKID", "STATUS", "REGION", "ACCOUNTID", "RESOURCEID"]
                ],
                style_table={"overflowX": "auto"},
                style_as_list_view=True,
                style_cell={"textAlign": "left", "padding": "5px"},
            )

            fig_section = go.Figure(
                data=[
                    go.Bar(
                        name="Failed",
                        x=[failed_section],
                        y=[""],
                        orientation="h",
                        marker=dict(color="#e77676"),
                    ),
                    go.Bar(
                        name="Success",
                        x=[success_section],
                        y=[""],
                        orientation="h",
                        marker=dict(color="#45cc6e"),
                    ),
                ]
            )

            fig_section.update_layout(
                barmode="stack",
                margin=dict(l=10, r=10, t=10, b=10),
                paper_bgcolor="rgba(0,0,0,0)",
                plot_bgcolor="rgba(0,0,0,0)",
                showlegend=False,
                width=350,
                height=30,
                xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
                yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
                annotations=[
                    dict(
                        x=success_section + failed_section,
                        y=0,
                        xref="x",
                        yref="y",
                        text=str(success_section),
                        showarrow=False,
                        font=dict(color="#45cc6e", size=14),
                        xanchor="left",
                        yanchor="middle",
                    ),
                    dict(
                        x=0,
                        y=0,
                        xref="x",
                        yref="y",
                        text=str(failed_section),
                        showarrow=False,
                        font=dict(color="#e77676", size=14),
                        xanchor="right",
                        yanchor="middle",
                    ),
                ],
            )

            graph_section = dcc.Graph(
                figure=fig_section,
                config={"staticPlot": True},
                className="info-bar-child",
            )

            graph_div_section = html.Div(graph_section, className="graph-section-req")

            internal_accordion_item = dbc.AccordionItem(
                title=section,
                children=[html.Div([data_table], className="inner-accordion-content")],
            )

            internal_section_container = html.Div(
                [
                    graph_div_section,
                    dbc.Accordion(
                        [internal_accordion_item], start_collapsed=True, flush=True
                    ),
                ],
                className="accordion-inner--child",
            )

            direct_internal_items.append(internal_section_container)

        accordion_item = dbc.AccordionItem(
            title=f"{name}", children=direct_internal_items
        )
        section_container = html.Div(
            [
                graph_div,
                dbc.Accordion([accordion_item], start_collapsed=True, flush=True),
            ],
            className="accordion-inner",
        )

        section_containers.append(section_container)

    return html.Div(section_containers, className="compliance-data-layout")

dashboard/compliance/kisa_isms_p_2023_aws.py (new file, +25)
@@ -0,0 +1,25 @@
import warnings

from dashboard.common_methods import get_section_containers_kisa_ismsp

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            # "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_kisa_ismsp(
        aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
dashboard/compliance/kisa_isms_p_2023_korean_aws.py (new file, +25)
@@ -0,0 +1,25 @@
import warnings

from dashboard.common_methods import get_section_containers_kisa_ismsp

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            # "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_kisa_ismsp(
        aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
@@ -222,7 +222,7 @@ class ec2_securitygroup_with_many_ingress_egress_rules(Check):
         max_security_group_rules = ec2_client.audit_config.get(
             "max_security_group_rules", 50
         )
-        for security_group in ec2_client.security_groups:
+        for security_group_arn, security_group in ec2_client.security_groups.items():
 ```

 ```yaml title="config.yaml"
@@ -272,7 +272,7 @@ Each Prowler check has metadata associated which is stored at the same level of
   # Severity holds the check's severity, always in lowercase (critical, high, medium, low or informational)
   "Severity": "critical",
   # ResourceType only for AWS, holds the type from here
-  # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html
+  # https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html
   "ResourceType": "Other",
   # Description holds the title of the check, for now is the same as CheckTitle
   "Description": "Ensure there are no EC2 AMIs set as Public.",

@@ -14,10 +14,8 @@ Once that is satisfied go ahead and clone your forked repo:
 git clone https://github.com/<your-github-user>/prowler
 cd prowler
 ```
-For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
-```
-pip install poetry
-```
+For isolation and to avoid conflicts with other environments, we recommend using `poetry`, a Python dependency management tool. You can install it by following the instructions [here](https://python-poetry.org/docs/#installation).

 Then install all dependencies including the ones for developers:
 ```
 poetry install --with dev
@@ -50,6 +48,8 @@ You can see all dependencies in file `pyproject.toml`.

 Moreover, you would need to install [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) on the latest version to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).

+Additionally, please ensure to follow the code documentation practices outlined in this guide: [Google Python Style Guide - Comments and Docstrings](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings).
+
 ???+ note
     If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.

@@ -44,7 +44,6 @@ class Provider(ABC):
     Methods:
         print_credentials(): Displays the provider's credentials used for auditing in the command-line interface.
         setup_session(): Sets up the session for the provider.
-        get_output_mapping(): Returns the output mapping between the provider and the generic model.
         validate_arguments(): Validates the arguments for the provider.
         get_checks_to_execute_by_audit_resources(): Returns a set of checks based on the input resources to scan.

@@ -131,15 +130,6 @@ class Provider(ABC):
         """
         raise NotImplementedError()

-    @abstractmethod
-    def get_output_mapping(self) -> dict:
-        """
-        get_output_mapping returns the output mapping between the provider and the generic model.
-
-        This method needs to be created in each provider.
-        """
-        raise NotImplementedError()
-
     def validate_arguments(self) -> None:
         """
         validate_arguments validates the arguments for the provider.

@@ -592,7 +592,7 @@ is following the actual format, add one function where the client is passed to b
 `mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute pointed after *client*).

 In the example of BigQuery the function is called `mock_api_dataset_calls`. And inside of this function we found an assignation to
-be used in the `__get_datasets__` method in BigQuery class:
+be used in the `_get_datasets` method in BigQuery class:

 ```python
 # Mocking datasets
@@ -765,7 +765,7 @@ from tests.providers.azure.azure_fixtures import (
     set_mocked_azure_provider,
 )

-# Function to mock the service function __get_components__, this function task is to return a possible value that real function could returns
+# Function to mock the service function _get_components, this function task is to return a possible value that real function could returns
 def mock_appinsights_get_components(_):
     return {
         AZURE_SUBSCRIPTION_ID: {
@@ -779,12 +779,12 @@ def mock_appinsights_get_components(_):

 # Patch decorator to use the mocked function instead the function with the real API call
 @patch(
-    "prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
+    "prowler.providers.azure.services.appinsights.appinsights_service.AppInsights._get_components",
     new=mock_appinsights_get_components,
 )
 class Test_AppInsights_Service:
     # Mandatory test for every service, this method test the instance of the client is correct
-    def test__get_client__(self):
+    def test_get_client(self):
         app_insights = AppInsights(set_mocked_azure_provider())
         assert (
             app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
@@ -794,8 +794,8 @@ class Test_AppInsights_Service:
     def test__get_subscriptions__(self):
         app_insights = AppInsights(set_mocked_azure_provider())
         assert app_insights.subscriptions.__class__.__name__ == "dict"
-    # Test for the function __get_components__, inside this client is used the mocked function
-    def test__get_components__(self):
+    # Test for the function _get_components, inside this client is used the mocked function
+    def test_get_components(self):
         appinsights = AppInsights(set_mocked_azure_provider())
         assert len(appinsights.components) == 1
         assert (

docs/index.md (109 lines changed)
@@ -19,14 +19,40 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
 ## Quick Start
 ### Installation

-Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed using pip with `Python >= 3.9`:
+Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed as Python package with `Python >= 3.9`:

-=== "Generic"
+=== "pipx"
+
+    [pipx](https://pipx.pypa.io/stable/) is a tool to install Python applications in isolated environments. It is recommended to use `pipx` for a global installation.

     _Requirements_:

     * `Python >= 3.9`
-    * `Python pip >= 3.9`
+    * `pipx` installed: [pipx installation](https://pipx.pypa.io/stable/installation/).
     * AWS, GCP, Azure and/or Kubernetes credentials

     _Commands_:

+    ``` bash
+    pipx install prowler
+    prowler -v
+    ```
+
+    To upgrade Prowler to the latest version, run:
+
+    ``` bash
+    pipx upgrade prowler
+    ```
+
+=== "pip"
+
+    ???+ warning
+        This method is not recommended because it will modify the environment which you choose to install. Consider using [pipx](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) for a global installation.
+
+    _Requirements_:
+
+    * `Python >= 3.9`
+    * `Python pip >= 21.0.0`
+    * AWS, GCP, Azure and/or Kubernetes credentials
+
+    _Commands_:
@@ -36,13 +62,19 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
     prowler -v
     ```

+    To upgrade Prowler to the latest version, run:
+
+    ``` bash
+    pip install --upgrade prowler
+    ```
+
 === "Docker"

     _Requirements_:

     * Have `docker` installed: https://docs.docker.com/get-docker/.
-    * AWS, GCP, Azure and/or Kubernetes credentials
     * In the command below, change `-v` to your local directory path in order to access the reports.
+    * AWS, GCP, Azure and/or Kubernetes credentials

     _Commands_:
@@ -54,41 +86,21 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
         --env AWS_SESSION_TOKEN toniblyx/prowler:latest
     ```

-=== "Ubuntu"
-
-    _Requirements for Ubuntu 20.04.3 LTS_:
-
-    * AWS, GCP, Azure and/or Kubernetes credentials
-    * Install python 3.9 with: `sudo apt-get install python3.9`
-    * Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
-    * Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`
-    * To make sure you use pip for 3.9 get the get-pip script with: `curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py`
-    * Execute it with the proper python version: `sudo python3.9 get-pip.py`
-    * Now you should have pip for 3.9 ready: `pip3.9 --version`
-
-    _Commands_:
-
-    ```
-    pip3.9 install prowler
-    export PATH=$PATH:/home/$HOME/.local/bin/
-    prowler -v
-    ```
-
 === "GitHub"

     _Requirements for Developers_:

+    * `git`
+    * `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
     * AWS, GCP, Azure and/or Kubernetes credentials
-    * `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)

     _Commands_:

     ```
     git clone https://github.com/prowler-cloud/prowler
     cd prowler
-    poetry shell
     poetry install
-    python prowler.py -v
+    poetry run python prowler.py -v
     ```
     ???+ note
         If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
@@ -97,15 +109,33 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

     _Requirements_:

     * `Python >= 3.9`
     * AWS, GCP, Azure and/or Kubernetes credentials
-    * Latest Amazon Linux 2 should come with Python 3.9 already installed however it may need pip. Install Python pip 3.9 with: `sudo yum install -y python3-pip`.
-    * Make sure setuptools for python is already installed with: `pip3 install setuptools`

     _Commands_:

     ```
-    pip3.9 install prowler
-    export PATH=$PATH:/home/$HOME/.local/bin/
+    python3 -m pip install --user pipx
+    python3 -m pipx ensurepath
+    pipx install prowler
     prowler -v
     ```

+=== "Ubuntu"
+
+    _Requirements_:
+
+    * `Ubuntu 23.04` or above, if you are using an older version of Ubuntu check [pipx installation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) and ensure you have `Python >= 3.9`.
+    * `Python >= 3.9`
+    * AWS, GCP, Azure and/or Kubernetes credentials
+
+    _Commands_:
+
+    ``` bash
+    sudo apt update
+    sudo apt install pipx
+    pipx ensurepath
+    pipx install prowler
+    prowler -v
+    ```
@@ -125,7 +155,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

 === "AWS CloudShell"

-    After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [2](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
+    After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:

     _Requirements_:

@@ -133,11 +163,13 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

     _Commands_:

-    ```
+    ```bash
     sudo bash
     adduser prowler
     su prowler
-    pip install prowler
+    python3 -m pip install --user pipx
+    python3 -m pipx ensurepath
+    pipx install prowler
     cd /tmp
     prowler aws
     ```
@@ -153,9 +185,12 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

     _Commands_:

-    ```
-    pip install prowler
-    prowler -v
+    ```bash
+    python3 -m pip install --user pipx
+    python3 -m pipx ensurepath
+    pipx install prowler
+    cd /tmp
+    prowler azure --az-cli-auth
     ```

 ## Prowler container versions

@@ -4,21 +4,25 @@ Prowler allows you to do threat detection in AWS based on the CloudTrail log rec
|
||||
```
|
||||
prowler aws --category threat-detection
|
||||
```
|
||||
This comand will run these checks:
|
||||
This command will run these checks:
|
||||
|
||||
* `cloudtrail_threat_detection_privilege_escalation`
|
||||
* `cloudtrail_threat_detection_enumeration`
|
||||
* `cloudtrail_threat_detection_privilege_escalation` -> Detects privilege escalation attacks.
|
||||
* `cloudtrail_threat_detection_enumeration` -> Detects enumeration attacks.
|
||||
* `cloudtrail_threat_detection_llm_jacking` -> Detects LLM Jacking attacks.
|
||||
|
||||
???+ note
|
||||
Threat Detection checks will be only executed using `--category threat-detection` flag due to preformance.
|
||||
Threat Detection checks will be only executed using `--category threat-detection` flag due to performance.
|
||||
|
||||

## Config File

If you want to manage the behavior of the Threat Detection checks, you can edit the `config.yaml` file from `/prowler/config`. In this file you can tune the following attributes related to Threat Detection (the sketch after this list shows how the thresholds are applied):

* `threat_detection_privilege_escalation_threshold`: determines the percentage of actions found to decide if it is a privilege escalation attack event; by default it is 0.2 (20%)
* `threat_detection_privilege_escalation_minutes`: the number of past minutes to search, from now, for privilege escalation attacks; by default 1440 minutes (24 hours)
* `threat_detection_privilege_escalation_actions`: these are the default actions related to privilege escalation.
* `threat_detection_enumeration_threshold`: determines the percentage of actions found to decide if it is an enumeration attack event; by default it is 0.3 (30%)
* `threat_detection_enumeration_minutes`: the number of past minutes to search, from now, for enumeration attacks; by default 1440 minutes (24 hours)
* `threat_detection_enumeration_actions`: these are the default actions related to enumeration attacks.
* `threat_detection_llm_jacking_threshold`: determines the percentage of actions found to decide if it is an LLM Jacking attack event; by default it is 0.4 (40%)
* `threat_detection_llm_jacking_minutes`: the number of past minutes to search, from now, for LLM Jacking attacks; by default 1440 minutes (24 hours)
* `threat_detection_llm_jacking_actions`: these are the default actions related to LLM Jacking attacks.
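To make the threshold semantics concrete, here is a minimal sketch (illustrative only, not Prowler's actual implementation) of how a value such as `threat_detection_enumeration_threshold` is applied: a window of CloudTrail events is flagged once the fraction of the configured actions observed reaches the threshold.

```python
# Minimal illustrative sketch (not Prowler's actual code): flag an
# enumeration attack when the fraction of configured enumeration actions
# seen in the CloudTrail look-back window reaches the threshold.
def is_enumeration_attack(seen_actions, configured_actions, threshold=0.3):
    matched = sum(1 for action in configured_actions if action in seen_actions)
    return matched / len(configured_actions) >= threshold


# Example: 2 of 4 configured actions were seen -> 0.5 >= 0.3 -> flagged.
print(is_enumeration_attack({"ListUsers", "ListRoles"},
                            ["ListUsers", "ListRoles", "ListBuckets", "GetCallerIdentity"]))
```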
@@ -7,7 +7,6 @@ At the time of writing this documentation the available Azure Clouds from differe

- AzureCloud
- AzureChinaCloud
- AzureUSGovernment
- AzureGermanCloud

If you want to change the default one, you must include the flag `--azure-region`, e.g.:

@@ -13,37 +13,57 @@ The following list includes all the AWS checks with configurable variables that

| Check Name | Value | Type |
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `appstream_fleet_maximum_session_duration` | `max_session_duration_seconds` | Integer |
| `appstream_fleet_session_disconnect_timeout` | `max_disconnect_timeout_in_seconds` | Integer |
| `appstream_fleet_session_idle_disconnect_timeout` | `max_idle_disconnect_timeout_in_seconds` | Integer |
| `autoscaling_find_secrets_ec2_launch_configuration` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_code` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_using_supported_runtimes` | `obsolete_lambda_runtimes` | Integer |
| `awslambda_function_vpc_is_in_multi_azs` | `lambda_min_azs` | Integer |
| `cloudformation_stack_outputs_find_secrets` | `secrets_ignore_patterns` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
| `cloudwatch_log_group_no_critical_pii_in_logs` | `critical_pii_entities` | List of Strings |
| `cloudwatch_log_group_no_critical_pii_in_logs` | `pii_language` | String |
| `cloudwatch_log_group_no_secrets_in_logs` | `secrets_ignore_patterns` | List of Strings |
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
| `codebuild_project_no_secrets_in_variables` | `excluded_sensitive_environment_variables` | List of Strings |
| `codebuild_project_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
| `ec2_instance_secrets_user_data` | `secrets_ignore_patterns` | List of Strings |
| `ec2_launch_template_no_secrets` | `secrets_ignore_patterns` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports`| `ec2_sg_high_risk_ports` | List of Integer |
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
| `ecs_task_definitions_no_environment_secrets` | `secrets_ignore_patterns` | List of Strings |
| `eks_cluster_uses_a_supported_version` | `eks_cluster_oldest_version_supported` | String |
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |
| `elb_is_in_multiple_az` | `elb_min_azs` | Integer |
| `elbv2_is_in_multiple_az` | `elbv2_min_azs` | Integer |
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
| `iam_user_accesskey_unused` | `max_unused_access_keys_days` | Integer |
| `iam_user_console_access_unused` | `max_console_access_days` | Integer |
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
| `organizations_scp_check_deny_regions` | `organizations_enabled_regions` | List of Strings |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `secretsmanager_secret_rotated_periodically` | `max_days_secret_unrotated` | Integer |
| `secretsmanager_secret_unused` | `max_days_secret_unused` | Integer |
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
| `ssm_document_secrets` | `secrets_ignore_patterns` | List of Strings |
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
| `vpc_endpoint_connections_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `vpc_endpoint_services_allowed_principals_trust_boundaries` | `trusted_account_ids` | List of Strings |

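To override any of these variables without editing the bundled configuration, you can write a small custom config file and pass it to Prowler with `--config-file`. The snippet below is a minimal sketch; the file name and the chosen value are illustrative.

```python
# Minimal sketch: write a custom config overriding one configurable variable
# (value and file name are illustrative), then pass it to Prowler.
import yaml  # requires PyYAML

custom = {"aws": {"max_unused_access_keys_days": 30}}
with open("custom_config.yaml", "w") as config_file:
    yaml.safe_dump(custom, config_file)

# Then run, for example:
#   prowler aws -c iam_user_accesskey_unused --config-file custom_config.yaml
```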

## Azure
@@ -125,8 +145,24 @@ aws:
    [
      "amazon-elb"
    ]
  # aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
  ec2_sg_high_risk_ports:
    [
      25,
      110,
      135,
      143,
      445,
      3000,
      4333,
      5000,
      5500,
      8080,
      8088,
    ]

  # AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
  # AWS SSM Configuration (aws.ssm_documents_set_as_public)
  # Single account environment: No action required. The AWS account number will be automatically added by the checks.
  # Multi account environment: Any additional trusted account number should be added as a comma separated list, e.g.
  # trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
@@ -194,7 +230,7 @@ aws:

  # AWS CloudTrail Configuration
  # aws.cloudtrail_threat_detection_privilege_escalation
  threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is a privilege_escalation attack event; by default 0.2 (20%)
  threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks; by default 1440 minutes (24 hours)
  threat_detection_privilege_escalation_actions:
    [
@@ -251,7 +287,7 @@ aws:
      "UpdateLoginProfile",
    ]
  # aws.cloudtrail_threat_detection_enumeration
  threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event; by default 0.3 (30%)
  threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks; by default 1440 minutes (24 hours)
  threat_detection_enumeration_actions:
    [
@@ -346,6 +382,24 @@ aws:
      "LookupEvents",
      "Search",
    ]
  # aws.cloudtrail_threat_detection_llm_jacking
  threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event; by default 0.4 (40%)
  threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks; by default 1440 minutes (24 hours)
  threat_detection_llm_jacking_actions:
    [
      "PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
      "PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
      "PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
      "CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
      "InvokeModel", # Invokes a specified Bedrock model for inference using provided prompt and parameters (Read).
      "InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
      "GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
      "GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
      "GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
      "ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
      "ListFoundationModels", # Lists the available foundation models in Bedrock (List).
      "ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
    ]
  # AWS RDS Configuration
  # aws.rds_instance_backup_enabled
@@ -368,6 +422,18 @@ aws:
      "scheduler",
    ]

  # aws.eks_cluster_uses_a_supported_version
  # EKS clusters must be version 1.28 or higher
  eks_cluster_oldest_version_supported: "1.28"

  # AWS CodeBuild Configuration
  # aws.codebuild_project_no_secrets_in_variables
  # CodeBuild sensitive variables that are excluded from the check
  excluded_sensitive_environment_variables:
    [
    ]

# Azure Configuration
azure:
  # Azure Network Configuration

@@ -10,9 +10,11 @@ prowler dashboard

To run the Prowler local dashboard with Docker, use:

```sh
docker run -v /your/local/dir/prowler-output:/home/prowler/output --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
```

Make sure to update `/your/local/dir/prowler-output` so it matches the path that contains your Prowler output.

???+ note
    **Remember that the `dashboard` server is not authenticated; if you expose it to the internet, you run it at your own risk.**

@@ -13,7 +13,7 @@ prowler <provider> -c <check_to_fix_1> <check_to_fix_2> ... --fixer

```sh
prowler <provider> --list-fixers
```

It's important to note that using the fixers for `Access Analyzer`, `GuardDuty`, and `SecurityHub` may incur additional costs. These AWS services might trigger actions or deploy resources that can lead to charges on your AWS account.

## Writing a Fixer

To write a fixer, you need to create a file called `<check_id>_fixer.py` inside the check folder, with a function called `fixer` that receives either the region or the resource to be fixed as a parameter, and returns a boolean value indicating whether the fix was successful, as sketched below.
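For instance, a minimal region-based fixer could look like this. It assumes the `regional_clients` mapping exposed by Prowler's AWS service clients; the check ID and the specific API call are illustrative, not a definitive implementation.

```python
# ec2_ebs_default_encryption_fixer.py — a minimal, illustrative skeleton.
# The module must expose a `fixer` function that returns True on success.
from prowler.lib.logger import logger
from prowler.providers.aws.services.ec2.ec2_client import ec2_client


def fixer(region: str) -> bool:
    """Enable EBS encryption by default in the given region."""
    try:
        regional_client = ec2_client.regional_clients[region]
        regional_client.enable_ebs_encryption_by_default()
        return True
    except Exception as error:
        logger.error(f"{region} -- {error.__class__.__name__}: {error}")
        return False
```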
22  docs/tutorials/gcp/organization.md  Normal file
@@ -0,0 +1,22 @@

# GCP Organization

By default, Prowler scans all Google Cloud projects accessible to the authenticated user.

To limit the scan to projects within a specific Google Cloud organization, use the `--organization-id` option with the GCP organization ID:

```console
prowler gcp --organization-id organization-id
```

???+ warning
    Make sure that the credentials used have the Cloud Asset Viewer (`roles/cloudasset.viewer`) or Cloud Asset Owner (`roles/cloudasset.owner`) role at the organization level.

???+ note
    With this option, Prowler retrieves all projects within the specified organization, including those organized in folders and nested subfolders. This ensures that every project under the organization’s hierarchy is scanned, providing full visibility across the entire organization.

???+ note
    To find the organization ID, use the following command:

    ```console
    gcloud organizations list
    ```
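As a point of reference, the following hypothetical sketch uses the Cloud Asset Inventory API (the `google-cloud-asset` package) to list the projects under an organization — the same inventory this option relies on. The organization ID is illustrative, and Prowler's own implementation may differ.

```python
# Hypothetical sketch: list the projects under a GCP organization with the
# Cloud Asset Inventory API (requires google-cloud-asset and the roles
# mentioned in the warning above).
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
response = client.search_all_resources(
    request={
        "scope": "organizations/123456789",  # illustrative organization ID
        "asset_types": ["cloudresourcemanager.googleapis.com/Project"],
    }
)
for resource in response:
    print(resource.display_name)
```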
@@ -24,6 +24,11 @@ Prowler can run without showing its banner:

```console
prowler <provider> -b/--no-banner
```
## Disable Colors
Prowler can run without showing colors:
```console
prowler <provider> --no-color
```
## Checks
Prowler has checks per provider; there are several options related to them:

@@ -120,5 +125,5 @@ prowler <provider> --list-categories
```
- Execute specific categories:
```console
prowler <provider> --categories secrets
```

@@ -7,97 +7,155 @@ Mutelist option works along with other options and will modify the output in the

- CSV: `muted` is `True`. The field `status` will keep the original status, `MANUAL`, `PASS` or `FAIL`, of the finding.

## How the Mutelist Works

The **Mutelist** uses both "AND" and "OR" logic to determine which resources, checks, regions, and tags should be muted. For each check, the Mutelist evaluates whether the account, region, and resource match the specified criteria using "AND" logic. If tags are specified, the Mutelist can apply either "AND" or "OR" logic, as the sketch below illustrates.

If any of the criteria do not match, the check is not muted.

???+ note
    Remember that the Mutelist can be used with regular expressions.
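A simplified sketch of that matching logic (illustrative only; Prowler's actual implementation also handles exceptions and richer wildcard semantics):

```python
import re


def matches(pattern: str, value: str) -> bool:
    # "*" matches anything; otherwise the pattern is a regex or keyword.
    return pattern == "*" or bool(re.search(pattern, value))


def is_muted(rule: dict, account: str, check: str, region: str, resource: str, tags: str) -> bool:
    # Account, check, region and resource are ANDed together.
    if not (
        matches(rule["account"], account)
        and matches(rule["check"], check)
        and matches(rule["region"], region)
        and matches(rule["resource"], resource)
    ):
        return False
    # Each Tags entry must also match (ANDed); an alternation regex inside
    # one entry, e.g. "project=test|project=stage", yields OR semantics.
    return all(re.search(tag_rule, tags) for tag_rule in rule.get("tags", []))
```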

## Mutelist Specification

???+ note
    - For the Azure provider, the Account ID is the Subscription Name and the Region is the Location.
    - For the GCP provider, the Account ID is the Project ID and the Region is the Zone.
    - For the Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.

The Mutelist file uses the [YAML](https://en.wikipedia.org/wiki/YAML) format with the following syntax:

```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
  Accounts:
    "123456789012":
      Checks:
        "iam_user_hardware_mfa_enabled":
          Regions:
            - "us-east-1"
          Resources:
            - "user-1" # Will mute user-1 in check iam_user_hardware_mfa_enabled
            - "user-2" # Will mute user-2 in check iam_user_hardware_mfa_enabled
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "*" # Will mute every EC2 check in every account and region
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # Will mute every resource containing the string "test" and the tags 'test=test' and
            - "project=test|project=stage" # either of ('project=test' OR 'project=stage') in account 123456789012 and every region
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test"
            - "project=test" # This will mute every resource containing the string "test" and BOTH tags at the same time.
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags: # This will mute every resource containing the string "test" and the ones that contain EITHER the `test=test` OR `project=test` OR `project=dev` tags
            - "test=test|project=(test|dev)"
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # This will mute every resource containing the string "test" and the tags `test=test` and either `project=test` OR `project=stage` in every account and region.
            - "project=test|project=stage"

    "*":
      Checks:
        "s3_bucket_object_versioning":
          Regions:
            - "eu-west-1"
            - "us-east-1"
          Resources:
            - "ci-logs" # Will mute bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in the specified check and regions
            - "logs" # Will mute EVERY BUCKET containing the string "logs" in the specified check and regions
            - ".+-logs" # Will mute all buckets containing the terms ci-logs, qa-logs, etc. in the specified check and regions
        "ecs_task_definitions_no_environment_secrets":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Accounts:
              - "0123456789012"
            Regions:
              - "eu-west-1"
              - "eu-south-2" # Will mute every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Tags:
            - "environment=dev" # Will mute every resource containing the tag 'environment=dev' in every account and region

    "123456789012":
      Checks:
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Resources:
              - "test"
            Tags:
              - "environment=prod" # Will mute every resource in account 123456789012 except the ones containing the string "test" and the tag environment=prod

    "*":
      Checks:
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "test-resource" # Will mute the resource "test-resource" in all accounts and regions for any check from the EC2 service
```

### Account, Check, Region, Resource, and Tag

| Field | Description | Logic |
|----------|----------|----------|
| `account_id` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `check_name` | The name of the Prowler check. Use `*` to apply the mutelist to all checks, or `service_*` to apply it to all of a service's checks. | `ANDed` |
| `region` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `resource` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `tag` | The tag value. | `ORed` |

## How to Use the Mutelist

To use the Mutelist, you need to specify the path to the Mutelist YAML file using the `-w` or `--mutelist-file` option when running Prowler:

```
prowler <provider> -w mutelist.yaml
```

Replace `<provider>` with the appropriate provider name.

## Considerations

- The Mutelist can be used in combination with other Prowler options, such as the `--service` or `--checks` option, to further customize the scanning process.
- Make sure to review and update the Mutelist regularly to ensure it reflects the desired exclusions and remains up to date with your infrastructure.

## AWS Mutelist

### Mute specific AWS regions

@@ -142,7 +142,8 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc

    "desc": "Ensure CloudTrail is enabled in all regions",
    "product_uid": "prowler",
    "title": "Ensure CloudTrail is enabled in all regions",
    "uid": "prowler-aws-cloudtrail_multi_region_enabled-123456789012-ap-northeast-1-123456789012",
    "types": ["Software and Configuration Checks","Industry and Regulatory Standards","CIS AWS Foundations Benchmark"]
  },
  "resources": [
    {
@@ -189,11 +190,10 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc

  "type_uid": 200401,
  "type_name": "Create",
  "unmapped": {
    "related_url": "",
    "categories": ["forensics-ready"],
    "depends_on": [],
    "related_to": [],
    "notes": "",
    "compliance": {
      "CISA": [
@@ -336,7 +336,7 @@ The following is the mapping between the native JSON and the Detection Finding f

| Provider | cloud.provider |
| CheckID | metadata.event_code |
| CheckTitle | finding_info.title |
| CheckType | finding_info.types |
| ServiceName | resources.group.name |
| SubServiceName | _Not mapped yet_ |
| Status | status_code |
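As a quick illustration of this mapping, the snippet below reads a Prowler JSON-OCSF output file and prints each finding's check ID and types; the file name is hypothetical.

```python
# Illustrative: walk a Prowler JSON-OCSF output file and print, per finding,
# the check ID (metadata.event_code) and its types (finding_info.types).
import json

with open("prowler-output.ocsf.json") as ocsf_file:  # hypothetical file name
    findings = json.load(ocsf_file)

for finding in findings:
    print(finding["metadata"]["event_code"], finding["finding_info"]["types"])
```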
@@ -36,10 +36,11 @@ If EBS default encyption is not enabled, sensitive information at rest is not pr

- `ec2_ebs_default_encryption`

If your security groups are not properly configured, the attack surface is increased. Nonetheless, Prowler will detect which security groups are in use (i.e., attached) so that it only notifies about those. This logic applies to the 15 checks related to open ports in security groups, the check for the default security group, and the check for security groups that allow all ingress and egress traffic; see the sketch after this list.

- `ec2_securitygroup_allow_ingress_from_internet_to_port_X` (15 checks)
- `ec2_securitygroup_default_restrict_traffic`
- `ec2_securitygroup_allow_wide_open_public_ipv4`

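A rough sketch of that "in use" detection, assuming attachment to a network interface as the criterion (illustrative, not Prowler's literal code):

```python
# Illustrative sketch: treat a security group as "in use" when it is attached
# to at least one network interface in the region.
import boto3

ec2 = boto3.client("ec2", region_name="eu-west-1")
used_security_group_ids = {
    group["GroupId"]
    for interface in ec2.describe_network_interfaces()["NetworkInterfaces"]
    for group in interface["Groups"]
}
print(used_security_group_ids)
```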
Prowler will also check which Network ACLs are in use, so it alerts only on those with open ports that are actually being used.

@@ -87,6 +87,7 @@ nav:

    - Google Cloud:
        - Authentication: tutorials/gcp/authentication.md
        - Projects: tutorials/gcp/projects.md
        - Organization: tutorials/gcp/organization.md
    - Kubernetes:
        - In-Cluster Execution: tutorials/kubernetes/in-cluster.md
        - Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md

@@ -59,9 +59,12 @@ Resources:

- 'appstream:Describe*'
- 'appstream:List*'
- 'backup:List*'
- 'bedrock:List*'
- 'bedrock:Get*'
- 'cloudtrail:GetInsightSelectors'
- 'codeartifact:List*'
- 'codebuild:BatchGet*'
- 'codebuild:ListReportGroups'
- 'cognito-idp:GetUserPoolMfaConfig'
- 'dlm:Get*'
- 'drs:Describe*'
@@ -82,6 +85,7 @@ Resources:
- 'logs:FilterLogEvents'
- 'lightsail:GetRelationalDatabases'
- 'macie2:GetMacieSession'
- 'macie2:GetAutomatedDiscoveryConfiguration'
- 's3:GetAccountPublicAccessBlock'
- 'shield:DescribeProtection'
- 'shield:GetSubscriptionState'

@@ -7,9 +7,12 @@

"appstream:Describe*",
"appstream:List*",
"backup:List*",
"bedrock:List*",
"bedrock:Get*",
"cloudtrail:GetInsightSelectors",
"codeartifact:List*",
"codebuild:BatchGet*",
"codebuild:ListReportGroups",
"cognito-idp:GetUserPoolMfaConfig",
"dlm:Get*",
"drs:Describe*",
@@ -30,6 +33,7 @@
"logs:FilterLogEvents",
"lightsail:GetRelationalDatabases",
"macie2:GetMacieSession",
"macie2:GetAutomatedDiscoveryConfiguration",
"s3:GetAccountPublicAccessBlock",
"shield:DescribeProtection",
"shield:GetSubscriptionState",

4273  poetry.lock  generated
File diff suppressed because it is too large
@@ -5,6 +5,7 @@ import sys

from os import environ

from colorama import Fore, Style
from colorama import init as colorama_init

from prowler.config.config import (
    csv_file_suffix,
@@ -15,8 +16,6 @@ from prowler.config.config import (
)
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
    exclude_checks_to_run,
    exclude_services_to_run,
    execute_checks,
@@ -36,10 +35,12 @@ from prowler.lib.check.check import (
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.custom_checks_metadata import (
    parse_custom_checks_metadata_file,
    update_checks_metadata,
)
from prowler.lib.check.models import CheckMetadata
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.asff.asff import ASFF
@@ -54,6 +55,7 @@ from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
    AzureMitreAttack,
@@ -68,8 +70,12 @@ from prowler.lib.outputs.slack.slack import Slack
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.aws.models import AWSOutputOptions
from prowler.providers.azure.models import AzureOutputOptions
from prowler.providers.common.provider import Provider
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
from prowler.providers.gcp.models import GCPOutputOptions
from prowler.providers.kubernetes.models import KubernetesOutputOptions


def prowler():
@@ -107,6 +113,9 @@ def prowler():
        and not checks_folder
    )

    if args.no_color:
        colorama_init(strip=True)

    if not args.no_banner:
        legend = args.verbose or getattr(args, "fixer", None)
        print_banner(legend)
@@ -131,7 +140,7 @@ def prowler():

    # Load checks metadata
    logger.debug("Loading checks metadata from .metadata.json files")
    bulk_checks_metadata = CheckMetadata.get_bulk(provider)

    if args.list_categories:
        print_categories(list_categories(bulk_checks_metadata))
@@ -141,7 +150,7 @@ def prowler():
    # Load compliance frameworks
    logger.debug("Loading compliance frameworks from .json files")

    bulk_compliance_frameworks = Compliance.get_bulk(provider)
    # Complete checks metadata with the compliance framework specification
    bulk_checks_metadata = update_checks_metadata_with_compliance(
        bulk_compliance_frameworks, bulk_checks_metadata
@@ -168,15 +177,15 @@ def prowler():

    # Load checks to execute
    checks_to_execute = load_checks_to_execute(
        bulk_checks_metadata=bulk_checks_metadata,
        bulk_compliance_frameworks=bulk_compliance_frameworks,
        checks_file=checks_file,
        check_list=checks,
        service_list=services,
        severities=severities,
        compliance_frameworks=compliance_framework,
        categories=categories,
        provider=provider,
    )

    # if --list-checks-json, dump a json file and exit
@@ -190,7 +199,7 @@ def prowler():
        sys.exit()

    # Provider to scan
    Provider.init_global_provider(args)
    global_provider = Provider.get_global_provider()

    # Print Provider Credentials
@@ -224,17 +233,30 @@ def prowler():
    # Once the provider is set and we have the eventual checks based on the resource identifier,
    # it is time to check what Prowler's checks are going to be executed
    checks_from_resources = global_provider.get_checks_to_execute_by_audit_resources()
    # Intersect checks from resources with checks to execute so we only run the checks that apply to the resources with the specified ARNs or tags
    if getattr(args, "resource_arn", None) or getattr(args, "resource_tag", None):
        checks_to_execute = checks_to_execute.intersection(checks_from_resources)

    # Sort final check list
    checks_to_execute = sorted(checks_to_execute)

    # Setup Mutelist
    global_provider.mutelist = args.mutelist_file

    # Setup Output Options
    if provider == "aws":
        output_options = AWSOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )
    elif provider == "azure":
        output_options = AzureOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )
    elif provider == "gcp":
        output_options = GCPOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )
    elif provider == "kubernetes":
        output_options = KubernetesOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )

    # Run the quick inventory for the provider if available
    if hasattr(args, "quick_inventory") and args.quick_inventory:
@@ -250,6 +272,7 @@ def prowler():
            global_provider,
            custom_checks_metadata,
            args.config_file,
            output_options,
        )
    else:
        logger.error(
@@ -257,7 +280,7 @@ def prowler():
        )

    # Prowler Fixer
    if output_options.fixer:
        print(f"{Style.BRIGHT}\nRunning Prowler Fixer, please wait...{Style.RESET_ALL}")
        # Check if there are any FAIL findings
        if any("FAIL" in finding.status for finding in findings):
@@ -303,7 +326,8 @@ def prowler():
    # TODO: this part is needed since the checks generate a Check_Report_XXX and the output uses Finding
    # This will be refactored so that the outputs generate the Finding directly
    finding_outputs = [
        Finding.generate_output(global_provider, finding, output_options)
        for finding in findings
    ]

    generated_outputs = {"regular": [], "compliance": []}
@@ -311,8 +335,8 @@ def prowler():
    if args.output_formats:
        for mode in args.output_formats:
            filename = (
                f"{output_options.output_directory}/"
                f"{output_options.output_filename}"
            )
            if mode == "csv":
                csv_output = CSV(
@@ -354,16 +378,16 @@ def prowler():
        )

    # Compliance Frameworks
    input_compliance_frameworks = set(output_options.output_modes).intersection(
        get_available_compliance_frameworks(provider)
    )
    if provider == "aws":
        for compliance_name in input_compliance_frameworks:
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                cis = AWSCIS(
                    findings=finding_outputs,
@@ -376,8 +400,8 @@ def prowler():
            elif compliance_name == "mitre_attack_aws":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = AWSMitreAttack(
                    findings=finding_outputs,
@@ -390,8 +414,8 @@ def prowler():
            elif compliance_name.startswith("ens_"):
                # Generate ENS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                ens = AWSENS(
                    findings=finding_outputs,
@@ -404,8 +428,8 @@ def prowler():
            elif compliance_name.startswith("aws_well_architected_framework"):
                # Generate AWS Well-Architected Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                aws_well_architected = AWSWellArchitected(
                    findings=finding_outputs,
@@ -418,8 +442,8 @@ def prowler():
            elif compliance_name.startswith("iso27001_"):
                # Generate ISO27001 Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                iso27001 = AWSISO27001(
                    findings=finding_outputs,
@@ -429,10 +453,24 @@ def prowler():
                )
                generated_outputs["compliance"].append(iso27001)
                iso27001.batch_write_data_to_file()
            elif compliance_name.startswith("kisa"):
                # Generate KISA-ISMS-P Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                kisa_ismsp = AWSKISAISMSP(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(kisa_ismsp)
                kisa_ismsp.batch_write_data_to_file()
            else:
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
@@ -448,8 +486,8 @@ def prowler():
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                cis = AzureCIS(
                    findings=finding_outputs,
@@ -462,8 +500,8 @@ def prowler():
            elif compliance_name == "mitre_attack_azure":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = AzureMitreAttack(
                    findings=finding_outputs,
@@ -475,8 +513,8 @@ def prowler():
                mitre_attack.batch_write_data_to_file()
            else:
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
@@ -492,8 +530,8 @@ def prowler():
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                cis = GCPCIS(
                    findings=finding_outputs,
@@ -506,8 +544,8 @@ def prowler():
            elif compliance_name == "mitre_attack_gcp":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = GCPMitreAttack(
                    findings=finding_outputs,
@@ -519,8 +557,8 @@ def prowler():
                mitre_attack.batch_write_data_to_file()
            else:
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
@@ -536,8 +574,8 @@ def prowler():
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                cis = KubernetesCIS(
                    findings=finding_outputs,
@@ -549,8 +587,8 @@ def prowler():
                cis.batch_write_data_to_file()
            else:
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
@@ -583,7 +621,11 @@ def prowler():
            )

            security_hub_regions = (
                global_provider.get_available_aws_service_regions(
                    "securityhub",
                    global_provider.identity.partition,
                    global_provider.identity.audited_regions,
                )
                if not global_provider.identity.audited_regions
                else global_provider.identity.audited_regions
            )
@@ -593,7 +635,7 @@ def prowler():
                aws_partition=global_provider.identity.partition,
                aws_session=global_provider.session.current_session,
                findings=asff_output.data,
                send_only_fails=output_options.send_sh_only_fails,
                aws_security_hub_available_regions=security_hub_regions,
            )
            # Send the findings to Security Hub
@@ -619,7 +661,7 @@ def prowler():
    display_summary_table(
        findings,
        global_provider,
        output_options,
    )
    # Only display compliance table if there are findings (not all MANUAL) and it is a default execution
    if (
@@ -638,13 +680,13 @@ def prowler():
                findings,
                bulk_checks_metadata,
                compliance,
                output_options.output_filename,
                output_options.output_directory,
                compliance_overview,
            )
    if compliance_overview:
        print(
            f"\nDetailed compliance results are in {Fore.YELLOW}{output_options.output_directory}/compliance/{Style.RESET_ALL}\n"
        )

    # If custom checks were passed, remove the modules

@@ -557,7 +557,7 @@
        }
      ],
      "Checks": [
        "inspector2_is_enabled"
      ]
    },
    {
@@ -587,7 +587,8 @@
        }
      ],
      "Checks": [
        "inspector2_active_findings_exist",
        "inspector2_is_enabled",
        "ecr_registry_scan_images_on_push_enabled",
        "ecr_repositories_scan_vulnerabilities_in_latest_image",
        "ecr_repositories_scan_images_on_push_enabled"
@@ -485,7 +485,7 @@
        "codeartifact_packages_external_public_publishing_disabled",
        "ecr_repositories_not_publicly_accessible",
        "efs_not_publicly_accessible",
        "eks_cluster_not_publicly_accessible",
        "elb_internet_facing",
        "elbv2_internet_facing",
        "s3_account_level_public_access_blocks",
@@ -664,7 +664,7 @@
        "awslambda_function_not_publicly_accessible",
        "apigateway_restapi_waf_acl_attached",
        "cloudfront_distributions_using_waf",
        "eks_cluster_not_publicly_accessible",
        "sagemaker_models_network_isolation_enabled",
        "sagemaker_models_vpc_settings_configured",
        "sagemaker_notebook_instance_vpc_settings_configured",

4333  prowler/compliance/aws/kisa_isms_p_2023_aws.json  Normal file
File diff suppressed because it is too large

4333  prowler/compliance/aws/kisa_isms_p_2023_korean_aws.json  Normal file
File diff suppressed because it is too large
@@ -19,7 +19,7 @@
        "ec2_ebs_public_snapshot",
        "ec2_instance_profile_attached",
        "ec2_instance_public_ip",
        "eks_cluster_not_publicly_accessible",
        "emr_cluster_master_nodes_no_public_ip",
        "iam_aws_attached_policy_no_administrative_privileges",
        "iam_customer_attached_policy_no_administrative_privileges",
@@ -61,7 +61,7 @@
        "ec2_ebs_public_snapshot",
        "ec2_instance_profile_attached",
        "ec2_instance_public_ip",
        "eks_cluster_not_publicly_accessible",
        "emr_cluster_master_nodes_no_public_ip",
        "iam_aws_attached_policy_no_administrative_privileges",
        "iam_customer_attached_policy_no_administrative_privileges",
@@ -102,7 +102,7 @@
      "Checks": [
        "ec2_ebs_public_snapshot",
        "ec2_instance_public_ip",
        "eks_cluster_not_publicly_accessible",
        "emr_cluster_master_nodes_no_public_ip",
        "awslambda_function_not_publicly_accessible",
        "awslambda_function_url_public",

@@ -971,7 +971,7 @@
      "Checks": [
        "ec2_ebs_public_snapshot",
        "ec2_instance_public_ip",
        "eks_cluster_not_publicly_accessible",
        "emr_cluster_master_nodes_no_public_ip",
        "awslambda_function_url_public",
        "rds_instance_no_public_access",
@@ -3043,9 +3043,7 @@
    {
      "Id": "9.4",
      "Description": "Ensure that Register with Entra ID is enabled on App Service",
      "Checks": [],
      "Attributes": [
        {
          "Section": "9. AppService",
@@ -3066,7 +3064,7 @@
      "Id": "9.5",
      "Description": "Ensure That 'PHP version' is the Latest, If Used to Run the Web App",
      "Checks": [
        "app_ensure_php_version_is_latest"
      ],
      "Attributes": [
        {
@@ -3088,7 +3086,7 @@
      "Id": "9.6",
      "Description": "Ensure that 'Python version' is the Latest Stable Version, if Used to Run the Web App",
      "Checks": [
        "app_ensure_python_version_is_latest"
      ],
      "Attributes": [
        {
@@ -3110,7 +3108,7 @@
      "Id": "9.7",
      "Description": "Ensure that 'Java version' is the latest, if used to run the Web App",
      "Checks": [
        "app_ensure_java_version_is_latest"
      ],
      "Attributes": [
        {
@@ -3132,7 +3130,7 @@
      "Id": "9.8",
      "Description": "Ensure that 'HTTP Version' is the Latest, if Used to Run the Web App",
      "Checks": [
        "app_ensure_using_http20"
      ],
      "Attributes": [
        {
@@ -3154,7 +3152,7 @@
      "Id": "9.9",
      "Description": "Ensure FTP deployments are Disabled",
      "Checks": [
        "app_ftp_deployment_disabled"
      ],
      "Attributes": [
        {
@@ -3175,9 +3173,7 @@
    {
      "Id": "9.10",
      "Description": "Ensure Azure Key Vaults are Used to Store Secrets",
      "Checks": [],
      "Attributes": [
        {
          "Section": "9. AppService",
@@ -3213,66 +3209,6 @@
|
||||
"References": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://learn.microsoft.com/en-us/security/benchmark/azure/mcsb-asset-management#am-4-limit-access-to-asset-management"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "9.10",
|
||||
"Description": "Ensure FTP deployments are Disabled",
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "9. AppService",
|
||||
"Profile": "Level 1",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "By default, Azure Functions, Web, and API Services can be deployed over FTP. If FTP is required for an essential deployment workflow, FTPS should be required for FTP login for all App Service Apps and Functions.",
|
||||
"RationaleStatement": "Azure FTP deployment endpoints are public. An attacker listening to traffic on a wifi network used by a remote employee or a corporate network could see login traffic in clear-text which would then grant them full control of the code base of the app or service. This finding is more severe if User Credentials for deployment are set at the subscription level rather than using the default Application Credentials which are unique per App.",
"ImpactStatement": "Any deployment workflows that rely on FTP or FTPs rather than the WebDeploy or HTTPs endpoints may be affected.",
"RemediationProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should be set to `Disabled` or `FTPS Only` **From Azure CLI** For each out of compliance application, run the following choosing either 'disabled' or 'FtpsOnly' as appropriate: ``` az webapp config set --resource-group <resource group name> --name <app name> --ftps-state [disabled|FtpsOnly] ``` **From PowerShell** For each out of compliance application, run the following: ``` Set-AzWebApp -ResourceGroupName <resource group name> -Name <app name> -FtpsState <Disabled or FtpsOnly> ```",
"AuditProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should not be set to `All allowed` **From Azure CLI** List webapps to obtain the ids. ``` az webapp list ``` List the publish profiles to obtain the username, password and ftp server url. ``` az webapp deployment list-publishing-profiles --ids <ids> { publishUrl: <URL_FOR_WEB_APP>, userName: <USER_NAME>, userPWD: <USER_PASSWORD>, } ``` **From PowerShell** List all Web Apps: ``` Get-AzWebApp ``` For each app: ``` Get-AzWebApp -ResourceGroupName <resource group name> -Name <app name> | Select-Object -ExpandProperty SiteConfig ``` In the output, look for the value of **FtpsState**. If its value is **AllAllowed** the setting is out of compliance. Any other value is considered in compliance with this check.",
"AdditionalInformation": "",
"DefaultValue": "[Azure Web Service Deploy via FTP](https://docs.microsoft.com/en-us/azure/app-service/deploy-ftp):[Azure Web Service Deployment](https://docs.microsoft.com/en-us/azure/app-service/overview-security):https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-data-protection#dp-4-encrypt-sensitive-information-in-transit:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-posture-vulnerability-management#pv-7-rapidly-and-automatically-remediate-software-vulnerabilities",
"References": "TA0008, T1570, M1031"
}
]
},
{
"Id": "9.11",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Azure Key Vault will store multiple types of sensitive information such as encryption keys, certificate thumbprints, and Managed Identity Credentials. Access to these 'Secrets' can be controlled through granular permissions.",
"RationaleStatement": "The credentials given to an application have permissions to create, delete, or modify data stored within the systems they access. If these credentials are stored within the application itself, anyone with access to the application or a copy of the code has access to them. Storing within Azure Key Vault as secrets increases security by controlling access. This also allows for updates of the credentials without redeploying the entire application.",
"ImpactStatement": "Integrating references to secrets within the key vault are required to be specifically integrated within the application code. This will require additional configuration to be made during the writing of an application, or refactoring of an already written one. There are also additional costs that are charged per 10000 requests to the Key Vault.",
"RemediationProcedure": "Remediation has 2 steps 1. Setup the Key Vault 2. Setup the App Service to use the Key Vault **Step 1: Set up the Key Vault** **From Azure CLI** ``` az keyvault create --name <name> --resource-group <myResourceGroup> --location myLocation ``` **From Powershell** ``` New-AzKeyvault -name <name> -ResourceGroupName <myResourceGroup> -Location <myLocation> ``` **Step 2: Set up the App Service to use the Key Vault** Sample JSON Template for App Service Configuration: ``` { //... resources: [ { type: Microsoft.Storage/storageAccounts, name: [variables('storageAccountName')], //... }, { type: Microsoft.Insights/components, name: [variables('appInsightsName')], //... }, { type: Microsoft.Web/sites, name: [variables('functionAppName')], identity: { type: SystemAssigned }, //... resources: [ { type: config, name: appsettings, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('storageConnectionStringName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('appInsightsKeyName'))] ], properties: { AzureWebJobsStorage: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], WEBSITE_CONTENTAZUREFILECONNECTIONSTRING: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], APPINSIGHTS_INSTRUMENTATIONKEY: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('appInsightsKeyResourceId')).secretUriWithVersion, ')')], WEBSITE_ENABLE_SYNC_UPDATE_SITE: true //... } }, { type: sourcecontrols, name: web, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.Web/sites/config', variables('functionAppName'), 'appsettings')] ], } ] }, { type: Microsoft.KeyVault/vaults, name: [variables('keyVaultName')], //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))] ], properties: { //... accessPolicies: [ { tenantId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').tenantId], objectId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').principalId], permissions: { secrets: [ get ] } } ] }, resources: [ { type: secrets, name: [variables('storageConnectionStringName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName'))] ], properties: { value: [concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';AccountKey=', listKeys(variables('storageAccountResourceId'),'2015-05-01-preview').key1)] } }, { type: secrets, name: [variables('appInsightsKeyName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Insights/components', variables('appInsightsName'))] ], properties: { value: [reference(resourceId('microsoft.insights/components/', variables('appInsightsName')), '2015-05-01').InstrumentationKey] } } ] } ] } ```",
"AuditProcedure": "**From Azure Portal** 1. Login to Azure Portal 2. In the expandable menu on the left go to `Key Vaults` 3. View the Key Vaults listed. **From Azure CLI** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list the secrets within these key vaults run the following command: ``` Get-AzKeyVaultSecret [-VaultName] <vault name> ``` **From Powershell** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list all secrets in a key vault run the following command: ``` Get-AzKeyVaultSecret -VaultName '<vaultName' ```",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/app-service/app-service-key-vault-references:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-identity-management#im-2-manage-application-identities-securely-and-automatically:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest",
"References": "TA0006, T1552, M1041"
}
]
},
{
"Id": "10.1",
"Description": "Ensure that Resource Locks are set for Mission-Critical Azure Resources",
"Checks": [],
"Attributes": [
{
"Section": "10. Miscellaneous",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Resource Manager Locks provide a way for administrators to lock down Azure resources to prevent deletion of, or modifications to, a resource. These locks sit outside of the Role Based Access Controls (RBAC) hierarchy and, when applied, will place restrictions on the resource for all users. These locks are very useful when there is an important resource in a subscription that users should not be able to delete or change. Locks can help prevent accidental and malicious changes or deletion.",
"RationaleStatement": "As an administrator, it may be necessary to lock a subscription, resource group, or resource to prevent other users in the organization from accidentally deleting or modifying critical resources. The lock level can be set to `CanNotDelete` or `ReadOnly` to achieve this purpose. - `CanNotDelete` means authorized users can still read and modify a resource, but they cannot delete the resource. - `ReadOnly` means authorized users can read a resource, but they cannot delete or update the resource. Applying this lock is similar to restricting all authorized users to the permissions granted by the Reader role.",
"ImpactStatement": "There can be unintended outcomes of locking a resource. Applying a lock to a parent service will cause it to be inherited by all resources within. Conversely, applying a lock to a resource may not apply to connected storage, leaving it unlocked. Please see the documentation for further information.",
"RemediationProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. For each mission critical resource, click on `Locks` 3. Click `Add` 4. Give the lock a name and a description, then select the type, `Read-only` or `Delete` as appropriate 5. Click OK **From Azure CLI** To lock a resource, provide the name of the resource, its resource type, and its resource group name. ``` az lock create --name <LockName> --lock-type <CanNotDelete/Read-only> --resource-group <resourceGroupName> --resource-name <resourceName> --resource-type <resourceType> ``` **From Powershell** ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> -Locktype <CanNotDelete/Read-only> ```",
"AuditProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. Click on `Locks` 3. Ensure the lock is defined with name and description, with type `Read-only` or `Delete` as appropriate. **From Azure CLI** Review the list of all locks set currently: ``` az lock list --resource-group <resourcegroupname> --resource-name <resourcename> --namespace <Namespace> --resource-type <type> --parent ``` **From Powershell** Run the following command to list all resources. ``` Get-AzResource ``` For each resource, run the following command to check for Resource Locks. ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> ``` Review the output of the `Properties` setting. Compliant settings will have the `CanNotDelete` or `ReadOnly` value.",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-asset-management#am-4-limit-access-to-asset-management",
"References": ""
}
]
}
]
}
@@ -19,8 +19,11 @@ Mutelist:
- "StackSet-AWSControlTowerSecurityResources-*"
- "StackSet-AWSControlTowerLoggingResources-*"
- "StackSet-AWSControlTowerExecutionRole-*"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER*"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER*"
- "StackSet-AWSControlTower*"
- "CLOUDTRAIL-ENABLED-ON-SHARED-ACCOUNTS-*"
- "AFT-Backend*"
"cloudtrail_*":
Regions:
- "*"
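The exact-name Control Tower entries above are replaced by wildcard patterns, so newly created baseline resources are muted without further edits. A minimal sketch of the wildcard semantics (Python's fnmatch is used here purely to illustrate shell-style matching; the matcher inside Prowler's mutelist is not shown in this diff):

```python
from fnmatch import fnmatch

# Shell-style wildcards: one pattern now covers every Control Tower StackSet.
pattern = "StackSet-AWSControlTower*"
print(fnmatch("StackSet-AWSControlTowerExecutionRole-eu-west-1", pattern))  # True
print(fnmatch("StackSet-MyOwnStack", pattern))                              # False
```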
@@ -1,16 +1,18 @@
import os
import pathlib
from datetime import datetime, timezone
from enum import Enum
from os import getcwd

import requests
import yaml
from packaging import version

from prowler.lib.logger import logger

timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "4.3.1"
prowler_version = "4.6.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -20,8 +22,13 @@ gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accd
orange_color = "\033[38;5;208m"
banner_color = "\033[1;92m"

finding_statuses = ["PASS", "FAIL", "MANUAL"]
valid_severities = ["critical", "high", "medium", "low", "informational"]

class Provider(str, Enum):
    AWS = "aws"
    GCP = "gcp"
    AZURE = "azure"
    KUBERNETES = "kubernetes"

# Compliance
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
@@ -29,7 +36,7 @@ actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))

def get_available_compliance_frameworks(provider=None):
    available_compliance_frameworks = []
    providers = ["aws", "gcp", "azure", "kubernetes"]
    providers = [p.value for p in Provider]
    if provider:
        providers = [provider]
    for provider in providers:
@@ -86,7 +93,7 @@ def check_current_version():
        "https://api.github.com/repos/prowler-cloud/prowler/tags", timeout=1
    )
    latest_version = release_response.json()[0]["name"]
    if latest_version != prowler_version:
    if version.parse(latest_version) > version.parse(prowler_version):
        return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
    else:
        return (
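The switch from `!=` to `version.parse` above matters because plain string comparison orders versions lexicographically. A self-contained sketch of the difference:

```python
from packaging import version

print("4.10.0" > "4.9.1")                                # False: compares character by character
print(version.parse("4.10.0") > version.parse("4.9.1"))  # True: compares release segments numerically
```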
@@ -41,8 +41,29 @@ aws:
    [
      "amazon-elb"
    ]
  # aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
  ec2_sg_high_risk_ports:
    [
      25,
      110,
      135,
      143,
      445,
      3000,
      4333,
      5000,
      5500,
      8080,
      8088,
    ]

  # AWS ECS Configuration
  # aws.ecs_service_fargate_latest_platform_version
  fargate_linux_latest_version: "1.4.0"
  fargate_windows_latest_version: "1.0.0"

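A minimal, self-contained sketch of the screening implied by `ec2_sg_high_risk_ports` (how the check wires this value in from the configuration file is not shown in this diff and is assumed):

```python
# Ports mirrored from the configuration list above.
HIGH_RISK_PORTS = {25, 110, 135, 143, 445, 3000, 4333, 5000, 5500, 8080, 8088}

def is_high_risk(port: int) -> bool:
    """Return True when an internet-exposed ingress port is on the high-risk list."""
    return port in HIGH_RISK_PORTS

print(is_high_risk(445))  # True: SMB should never face the internet
print(is_high_risk(443))  # False: HTTPS is not on the list
```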
  # AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
  # AWS SSM Configuration (aws.ssm_documents_set_as_public)
  # Single account environment: No action required. The AWS account number will be automatically added by the checks.
  # Multi account environment: Any additional trusted account number should be added as a space separated list, e.g.
  # trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
@@ -51,6 +72,31 @@ aws:
  # AWS Cloudwatch Configuration
  # aws.cloudwatch_log_group_retention_policy_specific_days_enabled --> by default is 365 days
  log_group_retention_days: 365
  # aws.cloudwatch_log_group_no_critical_pii_in_logs --> see all available entities in https://microsoft.github.io/presidio/supported_entities/
  critical_pii_entities: [
      "CREDIT_CARD", # Credit card numbers are highly sensitive financial information.
      "CRYPTO", # Crypto wallet numbers (e.g., Bitcoin addresses) can give access to cryptocurrency.
      "IBAN_CODE", # International Bank Account Numbers are critical financial information.
      "US_BANK_NUMBER", # US bank account numbers are sensitive and should be protected.
      "US_SSN", # US Social Security Numbers are critical PII used for identity verification.
      "US_PASSPORT", # US passport numbers can be used for identity theft.
      "US_ITIN", # US Individual Taxpayer Identification Numbers are sensitive personal identifiers.
      #"UK_NHS", # UK NHS numbers can be used to access medical records and other private information.
      #"ES_NIF", # Spanish NIF (Personal tax ID) is critical for identification and tax purposes.
      #"ES_NIE", # Spanish NIE (Foreigners ID card) is a critical identifier for foreign residents.
      #"IT_FISCAL_CODE", # Italian personal identification code is sensitive PII for tax and legal purposes.
      #"IT_PASSPORT", # Italian passport numbers are critical PII.
      #"IT_IDENTITY_CARD", # Italian identity card numbers are critical for personal identification.
      #"PL_PESEL", # Polish PESEL numbers are sensitive personal identifiers.
      #"SG_NRIC_FIN", # Singapore National Registration Identification Card is critical PII.
      #"AU_ABN", # Australian Business Numbers are critical for business identification.
      #"AU_TFN", # Australian Tax File Numbers are sensitive and used for taxation purposes.
      #"AU_MEDICARE", # Australian Medicare numbers are sensitive medical identifiers.
      #"IN_PAN", # Indian Permanent Account Numbers are critical for tax purposes and identity.
      #"IN_AADHAAR", # Indian Aadhaar numbers are highly sensitive and serve as a universal identity number.
      #"FI_PERSONAL_IDENTITY_CODE" # Finnish Personal Identity Code is sensitive PII for personal identification.
    ]
  pii_language: "en" # Language for recognizing PII entities

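The entity names above come from Microsoft Presidio. A minimal sketch of analyzing a log line for some of the enabled entities (requires `presidio-analyzer`; whether the check invokes Presidio in exactly this way is an assumption):

```python
from presidio_analyzer import AnalyzerEngine

analyzer = AnalyzerEngine()
results = analyzer.analyze(
    text="User SSN 078-05-1120 appeared in the request log",
    entities=["US_SSN", "CREDIT_CARD"],  # subset of critical_pii_entities above
    language="en",                       # matches pii_language above
)
print([(result.entity_type, result.score) for result in results])
```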
  # AWS AppStream Session Configuration
  # aws.appstream_fleet_session_idle_disconnect_timeout
@@ -78,7 +124,9 @@ aws:
      "nodejs10.x",
      "nodejs12.x",
      "nodejs14.x",
      "nodejs16.x",
      "dotnet5.0",
      "dotnet7",
      "dotnetcore1.0",
      "dotnetcore2.0",
      "dotnetcore2.1",
@@ -86,6 +134,8 @@ aws:
      "ruby2.5",
      "ruby2.7",
    ]
  # aws.awslambda_function_vpc_is_in_multi_azs
  lambda_min_azs: 2

  # AWS Organizations
  # aws.organizations_scp_check_deny_regions
@@ -110,7 +160,7 @@ aws:

  # AWS CloudTrail Configuration
  # aws.cloudtrail_threat_detection_privilege_escalation
  threat_detection_privilege_escalation_threshold: 0.1 # Percentage of actions found to decide if it is a privilege_escalation attack event, by default is 0.1 (10%)
  threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is a privilege_escalation attack event, by default is 0.2 (20%)
  threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
  threat_detection_privilege_escalation_actions:
    [
@@ -167,7 +217,7 @@ aws:
      "UpdateLoginProfile",
    ]
  # aws.cloudtrail_threat_detection_enumeration
  threat_detection_enumeration_threshold: 0.1 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
  threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.3 (30%)
  threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
  threat_detection_enumeration_actions:
    [
@@ -262,6 +312,24 @@ aws:
      "LookupEvents",
      "Search",
    ]
  # aws.cloudtrail_threat_detection_llm_jacking
  threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event, by default is 0.4 (40%)
  threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks, by default is 1440 minutes (24 hours)
  threat_detection_llm_jacking_actions:
    [
      "PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
      "PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
      "PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
      "CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
      "InvokeModel", # Invokes a specified Bedrock model for inference using provided prompt and parameters (Read).
      "InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
      "GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
      "GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
      "GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
      "ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
      "ListFoundationModels", # Lists the available foundation models in Bedrock (List).
      "ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
    ]

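All three threat-detection blocks share the same ratio semantics described in their comments: an event window is flagged when the fraction of watched actions observed reaches the threshold. A self-contained sketch under that assumption (the aggregation inside the actual check is not shown in this diff):

```python
def is_attack(actions_seen: set, watched_actions: list, threshold: float) -> bool:
    """Flag when the share of watched actions seen reaches the configured threshold."""
    return len(actions_seen & set(watched_actions)) / len(watched_actions) >= threshold

watched = ["InvokeModel", "ListFoundationModels", "GetFoundationModelAvailability",
           "PutUseCaseForModelAccess", "CreateFoundationModelAgreement"]
print(is_attack({"InvokeModel", "ListFoundationModels"}, watched, 0.4))  # True: 2/5 = 0.4
```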
  # AWS RDS Configuration
  # aws.rds_instance_backup_enabled
@@ -271,6 +339,11 @@ aws:
  # AWS ACM Configuration
  # aws.acm_certificates_expiration_check
  days_to_expire_threshold: 7
  # aws.acm_certificates_rsa_key_length
  insecure_key_algorithms:
    [
      "RSA-1024",
    ]

  # AWS EKS Configuration
  # aws.eks_control_plane_logging_all_types_enabled
@@ -284,6 +357,42 @@ aws:
      "scheduler",
    ]

  # aws.eks_cluster_uses_a_supported_version
  # EKS clusters must be version 1.28 or higher
  eks_cluster_oldest_version_supported: "1.28"

  # AWS CodeBuild Configuration
  # aws.codebuild_project_no_secrets_in_variables
  # CodeBuild sensitive variables that are excluded from the check
  excluded_sensitive_environment_variables:
    [

    ]

  # AWS ELB Configuration
  # aws.elb_is_in_multiple_az
  # Minimum number of Availability Zones that a CLB must be in
  elb_min_azs: 2

  # AWS ELBv2 Configuration
  # aws.elbv2_is_in_multiple_az
  # Minimum number of Availability Zones that an ELBv2 must be in
  elbv2_min_azs: 2

  # AWS Secrets Configuration
  # Patterns to ignore in the secrets checks
  secrets_ignore_patterns: []

  # AWS Secrets Manager Configuration
  # aws.secretsmanager_secret_unused
  # Maximum number of days a secret can be unused
  max_days_secret_unused: 90

  # aws.secretsmanager_secret_rotated_periodically
  # Maximum number of days before a secret must be rotated
  max_days_secret_unrotated: 90

# Azure Configuration
azure:
  # Azure Network Configuration
prowler/exceptions/exceptions.py (new file, 56 lines)
@@ -0,0 +1,56 @@
class ProwlerException(Exception):
    """Base exception for all Prowler SDK errors."""

    ERROR_CODES = {
        (1901, "UnexpectedError"): {
            "message": "Unexpected error occurred.",
            "remediation": "Please review the error message and try again.",
        }
    }

    def __init__(
        self, code, source=None, file=None, original_exception=None, error_info=None
    ):
        """
        Initialize the ProwlerException class.

        Args:
            code (int): The error code.
            source (str): The source name. This can be the provider name, module name, service name, etc.
            file (str): The file name.
            original_exception (Exception): The original exception.
            error_info (dict): The error information.

        Example:
            >>> original_exception = Exception("Error occurred.")
            >>> str(ProwlerException(1901, "AWS", "file.txt", original_exception))
            'ProwlerException[1901]: Unexpected error occurred. - Error occurred.'
        """
        self.code = code
        self.source = source
        self.file = file
        if error_info is None:
            error_info = self.ERROR_CODES.get((code, self.__class__.__name__))
        self.message = error_info.get("message")
        self.remediation = error_info.get("remediation")
        self.original_exception = original_exception
        # Format -> [code] message - original_exception
        if original_exception is None:
            super().__init__(f"[{self.code}] {self.message}")
        else:
            super().__init__(
                f"[{self.code}] {self.message} - {self.original_exception}"
            )

    def __str__(self):
        """Overriding the __str__ method"""
        default_str = f"{self.__class__.__name__}[{self.code}]: {self.message}"
        if self.original_exception:
            default_str += f" - {self.original_exception}"
        return default_str


class UnexpectedError(ProwlerException):
    def __init__(self, source, file, original_exception=None):
        super().__init__(1901, source, file, original_exception)
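A quick usage sketch of the new exception hierarchy defined above:

```python
try:
    raise UnexpectedError(
        source="AWS", file="file.txt", original_exception=Exception("Error occurred.")
    )
except ProwlerException as error:
    # __str__ -> "UnexpectedError[1901]: Unexpected error occurred. - Error occurred."
    print(error)
    print(error.remediation)  # "Please review the error message and try again."
```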
@@ -1,4 +1,3 @@
import functools
import importlib
import json
import os
@@ -6,7 +5,6 @@ import re
import shutil
import sys
import traceback
from pkgutil import walk_packages
from types import ModuleType
from typing import Any

@@ -15,68 +13,15 @@ from colorama import Fore, Style

import prowler
from prowler.config.config import orange_color
from prowler.lib.check.compliance_models import load_compliance_framework
from prowler.lib.check.custom_checks_metadata import update_check_metadata
from prowler.lib.check.models import Check, load_check_metadata
from prowler.lib.check.models import Check
from prowler.lib.check.utils import recover_checks_from_provider
from prowler.lib.logger import logger
from prowler.lib.outputs.outputs import report
from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes
from prowler.providers.common.models import Audit_Metadata


# Load all checks metadata
def bulk_load_checks_metadata(provider: str) -> dict:
    bulk_check_metadata = {}
    checks = recover_checks_from_provider(provider)
    # Build list of check's metadata files
    for check_info in checks:
        # Build check path name
        check_name = check_info[0]
        check_path = check_info[1]
        # Ignore fixer files
        if check_name.endswith("_fixer"):
            continue
        # Append metadata file extension
        metadata_file = f"{check_path}/{check_name}.metadata.json"
        # Load metadata
        check_metadata = load_check_metadata(metadata_file)
        bulk_check_metadata[check_metadata.CheckID] = check_metadata

    return bulk_check_metadata


# Bulk load all compliance frameworks specification
def bulk_load_compliance_frameworks(provider: str) -> dict:
    """Bulk load all compliance frameworks specification into a dict"""
    try:
        bulk_compliance_frameworks = {}
        available_compliance_framework_modules = list_compliance_modules()
        for compliance_framework in available_compliance_framework_modules:
            if provider in compliance_framework.name:
                compliance_specification_dir_path = (
                    f"{compliance_framework.module_finder.path}/{provider}"
                )

                for filename in os.listdir(compliance_specification_dir_path):
                    file_path = os.path.join(
                        compliance_specification_dir_path, filename
                    )
                    # Check if it is a file and its size is greater than 0
                    if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
                        # Open Compliance file in JSON
                        # cis_v1.4_aws.json --> cis_v1.4_aws
                        compliance_framework_name = filename.split(".json")[0]
                        # Store the compliance info
                        bulk_compliance_frameworks[compliance_framework_name] = (
                            load_compliance_framework(file_path)
                        )
    except Exception as e:
        logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")

    return bulk_compliance_frameworks


# Exclude checks to run
def exclude_checks_to_run(checks_to_execute: set, excluded_checks: list) -> set:
    for check in excluded_checks:
@@ -328,7 +273,7 @@ def print_checks(
    for check in check_list:
        try:
            print(
                f"[{bulk_checks_metadata[check].CheckID}] {bulk_checks_metadata[check].CheckTitle} - {Fore.MAGENTA}{bulk_checks_metadata[check].ServiceName} {Fore.YELLOW}[{bulk_checks_metadata[check].Severity}]{Style.RESET_ALL}"
                f"[{bulk_checks_metadata[check].CheckID}] {bulk_checks_metadata[check].CheckTitle} - {Fore.MAGENTA}{bulk_checks_metadata[check].ServiceName} {Fore.YELLOW}[{bulk_checks_metadata[check].Severity.value}]{Style.RESET_ALL}"
            )
        except KeyError as error:
            logger.error(
@@ -347,126 +292,12 @@ def print_checks(
    print(message)


# Parse checks from compliance frameworks specification
def parse_checks_from_compliance_framework(
    compliance_frameworks: list, bulk_compliance_frameworks: dict
) -> list:
    """parse_checks_from_compliance_framework returns a set of checks from the given compliance_frameworks"""
    checks_to_execute = set()
    try:
        for framework in compliance_frameworks:
            # compliance_framework_json["Requirements"][*]["Checks"]
            compliance_framework_checks_list = [
                requirement.Checks
                for requirement in bulk_compliance_frameworks[framework].Requirements
            ]
            # Reduce nested list into a list
            # Pythonic functional magic
            compliance_framework_checks = functools.reduce(
                lambda x, y: x + y, compliance_framework_checks_list
            )
            # Then union this list of checks with the initial one
            checks_to_execute = checks_to_execute.union(compliance_framework_checks)
    except Exception as e:
        logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")

    return checks_to_execute


def recover_checks_from_provider(
    provider: str, service: str = None, include_fixers: bool = False
) -> list[tuple]:
    """
    Recover all checks from the selected provider and service

    Returns a list of tuples with the following format (check_name, check_path)
    """
    try:
        checks = []
        modules = list_modules(provider, service)
        for module_name in modules:
            # Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
            check_module_name = module_name.name
            # We need to exclude common shared libraries in services
            if (
                check_module_name.count(".") == 6
                and "lib" not in check_module_name
                and (not check_module_name.endswith("_fixer") or include_fixers)
            ):
                check_path = module_name.module_finder.path
                # Check name is the last part of the check_module_name
                check_name = check_module_name.split(".")[-1]
                check_info = (check_name, check_path)
                checks.append(check_info)
    except ModuleNotFoundError:
        logger.critical(f"Service {service} was not found for the {provider} provider.")
        sys.exit(1)
    except Exception as e:
        logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
        sys.exit(1)
    else:
        return checks
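The `count(".") == 6` filter above works because a check module path always has seven dot-separated parts; everything else under a service (shared `lib` helpers, fixers) is skipped. An illustration with a hypothetical check name:

```python
# Hypothetical module path following the documented format:
# "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
module_name = "prowler.providers.aws.services.s3.s3_bucket_public_access.s3_bucket_public_access"
assert module_name.count(".") == 6  # 7 parts -> 6 dots
check_name = module_name.split(".")[-1]
print(check_name)                   # "s3_bucket_public_access"
```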
def list_compliance_modules():
    """
    list_compliance_modules returns the available compliance framework modules and their paths
    """
    # This module path requires the full path including "prowler."
    module_path = "prowler.compliance"
    return walk_packages(
        importlib.import_module(module_path).__path__,
        importlib.import_module(module_path).__name__ + ".",
    )


# List all available modules in the selected provider and service
def list_modules(provider: str, service: str):
    # This module path requires the full path including "prowler."
    module_path = f"prowler.providers.{provider}.services"
    if service:
        module_path += f".{service}"
    return walk_packages(
        importlib.import_module(module_path).__path__,
        importlib.import_module(module_path).__name__ + ".",
    )


# Import an input check using its path
def import_check(check_path: str) -> ModuleType:
    lib = importlib.import_module(f"{check_path}")
    return lib


def run_check(check: Check, verbose: bool = False, only_logs: bool = False) -> list:
    """
    Run the check and return the findings
    Args:
        check (Check): check class to execute
        verbose (bool): print the check being executed if True
        only_logs (bool): suppress the console error message if True
    Returns:
        list: list of findings
    """
    findings = []
    if verbose:
        print(
            f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
        )
    logger.debug(f"Executing check: {check.CheckID}")
    try:
        findings = check.execute()
    except Exception as error:
        if not only_logs:
            print(
                f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
            )
        logger.error(
            f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
        )
    finally:
        return findings


def run_fixer(check_findings: list) -> int:
    """
    Run the fixer for the check if it exists and there are any FAIL findings
@@ -548,6 +379,7 @@ def execute_checks(
    global_provider: Any,
    custom_checks_metadata: Any,
    config_file: str,
    output_options: Any,
) -> list:
    # List to store all the check's findings
    all_findings = []
@@ -583,22 +415,51 @@ def execute_checks(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )

    # Set verbose flag
    verbose = False
    if hasattr(output_options, "verbose"):
        verbose = output_options.verbose
    elif hasattr(output_options, "fixer"):
        verbose = output_options.fixer

    # Execution with the --only-logs flag
    if global_provider.output_options.only_logs:
    if output_options.only_logs:
        for check_name in checks_to_execute:
            # Recover service from check name
            service = check_name.split("_")[0]
            try:
                try:
                    # Import check module
                    check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
                    lib = import_check(check_module_path)
                    # Recover functions from check
                    check_to_execute = getattr(lib, check_name)
                    check = check_to_execute()
                except ModuleNotFoundError:
                    logger.error(
                        f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
                    )
                    continue
                if verbose:
                    print(
                        f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity.value}]{Style.RESET_ALL}"
                    )
                check_findings = execute(
                    service,
                    check_name,
                    check,
                    global_provider,
                    services_executed,
                    checks_executed,
                    custom_checks_metadata,
                    output_options,
                )
                report(check_findings, global_provider, output_options)
                all_findings.extend(check_findings)

                # Update Audit Status
                services_executed.add(service)
                checks_executed.add(check_name)
                global_provider.audit_metadata = update_audit_metadata(
                    global_provider.audit_metadata, services_executed, checks_executed
                )

            # If the check does not exist in the provider or belongs to another provider
            except ModuleNotFoundError:
                logger.error(
@@ -647,15 +508,39 @@ def execute_checks(
                        f"-> Scanning {orange_color}{service}{Style.RESET_ALL} service"
                    )
                    try:
                        try:
                            # Import check module
                            check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
                            lib = import_check(check_module_path)
                            # Recover functions from check
                            check_to_execute = getattr(lib, check_name)
                            check = check_to_execute()
                        except ModuleNotFoundError:
                            logger.error(
                                f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
                            )
                            continue
                        if verbose:
                            print(
                                f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity.value}]{Style.RESET_ALL}"
                            )
                        check_findings = execute(
                            service,
                            check_name,
                            check,
                            global_provider,
                            custom_checks_metadata,
                            output_options,
                        )

                        report(check_findings, global_provider, output_options)

                        all_findings.extend(check_findings)
                        services_executed.add(service)
                        checks_executed.add(check_name)
                        global_provider.audit_metadata = update_audit_metadata(
                            global_provider.audit_metadata,
                            services_executed,
                            checks_executed,
                            custom_checks_metadata,
                        )
                        all_findings.extend(check_findings)

                    # If the check does not exist in the provider or belongs to another provider
                    except ModuleNotFoundError:
@@ -670,60 +555,79 @@ def execute_checks(
                )
                bar()
        bar.title = f"-> {Fore.GREEN}Scan completed!{Style.RESET_ALL}"

    # Custom report interface
    if os.environ.get("PROWLER_REPORT_LIB_PATH"):
        try:
            logger.info("Using custom report interface ...")
            lib = os.environ["PROWLER_REPORT_LIB_PATH"]
            outputs_module = importlib.import_module(lib)
            custom_report_interface = getattr(outputs_module, "report")

            # TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
            custom_report_interface(check_findings, output_options, global_provider)
        except Exception:
            sys.exit(1)

    return all_findings


def execute(
    service: str,
    check_name: str,
    check: Check,
    global_provider: Any,
    services_executed: set,
    checks_executed: set,
    custom_checks_metadata: Any,
    output_options: Any = None,
):
    try:
        # Import check module
        check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
        lib = import_check(check_module_path)
        # Recover functions from check
        check_to_execute = getattr(lib, check_name)
        check_class = check_to_execute()
    """
    Execute the check and report the findings

    Args:
        service (str): service name
        check_name (str): check name
        check (Check): instantiated check to run
        global_provider (Any): provider object
        services_executed (set): set of services executed so far
        checks_executed (set): set of checks executed so far
        custom_checks_metadata (Any): custom checks metadata
        output_options (Any): output options, depending on the provider

    Returns:
        list: list of findings
    """
    try:
        # Update check metadata to reflect that in the outputs
        if custom_checks_metadata and custom_checks_metadata["Checks"].get(
            check_class.CheckID
            check.CheckID
        ):
            check_class = update_check_metadata(
                check_class, custom_checks_metadata["Checks"][check_class.CheckID]
            check = update_check_metadata(
                check, custom_checks_metadata["Checks"][check.CheckID]
            )

        # Run check
        verbose = (
            global_provider.output_options.verbose
            or global_provider.output_options.fixer
        )
        check_findings = run_check(
            check_class, verbose, global_provider.output_options.only_logs
        )
        only_logs = False
        if hasattr(output_options, "only_logs"):
            only_logs = output_options.only_logs

        # Execute the check
        check_findings = []
        logger.debug(f"Executing check: {check.CheckID}")
        try:
            check_findings = check.execute()
        except Exception as error:
            if not only_logs:
                print(
                    f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
                )
            logger.error(
                f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
            )

        # Exclude findings per status
        if global_provider.output_options.status:
        if hasattr(output_options, "status") and output_options.status:
            check_findings = [
                finding
                for finding in check_findings
                if finding.status in global_provider.output_options.status
                if finding.status in output_options.status
            ]

        # Update Audit Status
        services_executed.add(service)
        checks_executed.add(check_name)
        global_provider.audit_metadata = update_audit_metadata(
            global_provider.audit_metadata, services_executed, checks_executed
        )

        # Mutelist findings
        # Before returning the findings, we need to apply the mute list logic
        if hasattr(global_provider, "mutelist") and global_provider.mutelist.mutelist:
            # TODO: make this prettier
            is_finding_muted_args = {}
            if global_provider.type == "aws":
                is_finding_muted_args["aws_account_id"] = (
@@ -738,27 +642,9 @@ def execute(
                    **is_finding_muted_args
                )

        # Refactor(Outputs)
        # Report the check's findings
        report(check_findings, global_provider)

        # Refactor(Outputs)
        if os.environ.get("PROWLER_REPORT_LIB_PATH"):
            try:
                logger.info("Using custom report interface ...")
                lib = os.environ["PROWLER_REPORT_LIB_PATH"]
                outputs_module = importlib.import_module(lib)
                custom_report_interface = getattr(outputs_module, "report")

                # TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
                custom_report_interface(
                    check_findings, global_provider.output_options, global_provider
                )
            except Exception:
                sys.exit(1)
    except ModuleNotFoundError:
        logger.error(
            f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
            f"Check '{check.CheckID}' was not found for the {global_provider.type.upper()} provider"
        )
        check_findings = []
    except Exception as error:
@@ -788,34 +674,3 @@ def update_audit_metadata(
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )


def recover_checks_from_service(service_list: list, provider: str) -> set:
    """
    Recover all checks from the selected provider and service

    Returns a set of checks from the given services
    """
    try:
        checks = set()
        service_list = [
            "awslambda" if service == "lambda" else service for service in service_list
        ]
        for service in service_list:
            service_checks = recover_checks_from_provider(provider, service)
            if not service_checks:
                logger.error(f"Service '{service}' does not have checks.")

            else:
                for check in service_checks:
                    # Recover check name and module name from import path
                    # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
                    check_name = check[0].split(".")[-1]
                    # If the service is present in the group list passed as parameters
                    # if service_name in group_list: checks_from_arn.add(check_name)
                    checks.add(check_name)
        return checks
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
@@ -1,35 +1,33 @@
from colorama import Fore, Style

from prowler.config.config import valid_severities
from prowler.lib.check.check import (
    parse_checks_from_compliance_framework,
    parse_checks_from_file,
    recover_checks_from_provider,
    recover_checks_from_service,
)
from prowler.lib.check.check import parse_checks_from_file
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata, Severity
from prowler.lib.logger import logger


# Generate the list of checks to execute
def load_checks_to_execute(
    bulk_checks_metadata: dict,
    bulk_compliance_frameworks: dict,
    checks_file: str,
    check_list: list,
    service_list: list,
    severities: list,
    compliance_frameworks: list,
    categories: set,
    provider: str,
    bulk_checks_metadata: dict = None,
    bulk_compliance_frameworks: dict = None,
    checks_file: str = None,
    check_list: list = None,
    service_list: list = None,
    severities: list = None,
    compliance_frameworks: list = None,
    categories: set = None,
) -> set:
    """Generate the list of checks to execute based on the cloud provider and the input arguments given"""
    try:
        # Local subsets
        checks_to_execute = set()
        check_aliases = {}
        check_severities = {key: [] for key in valid_severities}
        check_categories = {}
        check_severities = {severity.value: [] for severity in Severity}

        if not bulk_checks_metadata:
            bulk_checks_metadata = CheckMetadata.get_bulk(provider=provider)
        # First, loop over the bulk_checks_metadata to extract the needed subsets
        for check, metadata in bulk_checks_metadata.items():
            try:
@@ -64,24 +62,41 @@ def load_checks_to_execute(
                checks_to_execute.update(check_severities[severity])

            if service_list:
                checks_to_execute = (
                    recover_checks_from_service(service_list, provider)
                    & checks_to_execute
                )

                for service in service_list:
                    checks_to_execute = (
                        set(
                            CheckMetadata.list(
                                bulk_checks_metadata=bulk_checks_metadata,
                                service=service,
                            )
                        )
                        & checks_to_execute
                    )
        # Handle if there are checks passed using -C/--checks-file
        elif checks_file:
            checks_to_execute = parse_checks_from_file(checks_file, provider)

        # Handle if there are services passed using -s/--services
        elif service_list:
            checks_to_execute = recover_checks_from_service(service_list, provider)
            for service in service_list:
                checks_to_execute.update(
                    CheckMetadata.list(
                        bulk_checks_metadata=bulk_checks_metadata,
                        service=service,
                    )
                )

        # Handle if there are compliance frameworks passed using --compliance
        elif compliance_frameworks:
            checks_to_execute = parse_checks_from_compliance_framework(
                compliance_frameworks, bulk_compliance_frameworks
            )
            if not bulk_compliance_frameworks:
                bulk_compliance_frameworks = Compliance.get_bulk(provider=provider)
            for compliance_framework in compliance_frameworks:
                checks_to_execute.update(
                    CheckMetadata.list(
                        bulk_compliance_frameworks=bulk_compliance_frameworks,
                        compliance_framework=compliance_framework,
                    )
                )

        # Handle if there are categories passed using --categories
        elif categories:
@@ -90,20 +105,30 @@ def load_checks_to_execute(

        # If there are no checks passed as argument
        else:
            # Get all check modules to run with the specific provider
            checks = recover_checks_from_provider(provider)

            for check_info in checks:
                # Recover check name from import path (last part)
                # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
                check_name = check_info[0]
            # get all checks
            for check_name in CheckMetadata.list(
                bulk_checks_metadata=bulk_checks_metadata
            ):
                checks_to_execute.add(check_name)

        # Only execute threat detection checks if threat-detection category is set
        if "threat-detection" not in categories:
        if categories and categories != [] and "threat-detection" not in categories:
            for threat_detection_check in check_categories.get("threat-detection", []):
                checks_to_execute.discard(threat_detection_check)

        # Exclude check cloudwatch_log_group_no_secrets_in_logs from the checks to execute if not in check_list
        if (
            check_list
            and "cloudwatch_log_group_no_secrets_in_logs" not in check_list
            and "cloudwatch_log_group_no_secrets_in_logs" in checks_to_execute
        ):
            checks_to_execute.remove("cloudwatch_log_group_no_secrets_in_logs")
        # Exclude check cloudwatch_log_group_no_critical_pii_in_logs from the checks to execute if not in check_list
        if (
            check_list
            and "cloudwatch_log_group_no_critical_pii_in_logs" not in check_list
            and "cloudwatch_log_group_no_critical_pii_in_logs" in checks_to_execute
        ):
            checks_to_execute.remove("cloudwatch_log_group_no_critical_pii_in_logs")
        # Check Aliases
        checks_to_execute = update_checks_to_execute_with_aliases(
            checks_to_execute, check_aliases
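With the refactored signature, only `provider` is required and every filter defaults to `None`, so callers pass keyword arguments for just the subsets they need. A minimal usage sketch (argument values are illustrative):

```python
# Run every critical-severity AWS check; metadata is loaded on demand via
# CheckMetadata.get_bulk when bulk_checks_metadata is not supplied.
checks = load_checks_to_execute(
    provider="aws",
    severities=["critical"],
)
print(len(checks))
```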
@@ -1,6 +1,6 @@
import sys

from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.logger import logger


@@ -26,8 +26,8 @@ def update_checks_metadata_with_compliance(
            if check in requirement.Checks:
                # Include the requirement into the check's framework requirements
                compliance_requirements.append(requirement)
        # Create the Compliance_Model
        compliance = ComplianceBaseModel(
        # Create the Compliance
        compliance = Compliance(
            Framework=framework.Framework,
            Provider=framework.Provider,
            Version=framework.Version,
@@ -1,9 +1,11 @@
import os
import sys
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel, ValidationError, root_validator

from prowler.lib.check.utils import list_compliance_modules
from prowler.lib.logger import logger


@@ -167,6 +169,19 @@ class Mitre_Requirement(BaseModel):
    Checks: list[str]


# KISA-ISMS-P Requirement Attribute
class KISA_ISMSP_Requirement_Attribute(BaseModel):
    """KISA ISMS-P Requirement Attribute"""

    Domain: str
    Subdomain: str
    Section: str
    AuditChecklist: Optional[list[str]]
    RelatedRegulations: Optional[list[str]]
    AuditEvidence: Optional[list[str]]
    NonComplianceCases: Optional[list[str]]


# Base Compliance Model
# TODO: move this to compliance folder
class Compliance_Requirement(BaseModel):
@@ -181,6 +196,7 @@ class Compliance_Requirement(BaseModel):
        ENS_Requirement_Attribute,
        ISO27001_2013_Requirement_Attribute,
        AWS_Well_Architected_Requirement_Attribute,
        KISA_ISMSP_Requirement_Attribute,
        # Generic_Compliance_Requirement_Attribute must be the last one since it is the fallback for generic compliance framework
        Generic_Compliance_Requirement_Attribute,
    ]
@@ -188,8 +204,8 @@ class Compliance_Requirement(BaseModel):
    Checks: list[str]


class ComplianceBaseModel(BaseModel):
    """ComplianceBaseModel holds the base model for every compliance framework"""
class Compliance(BaseModel):
    """Compliance holds the base model for every compliance framework"""

    Framework: str
    Provider: str
@@ -213,16 +229,137 @@ class ComplianceBaseModel(BaseModel):
|
||||
raise ValueError("Framework or Provider must not be empty")
|
||||
return values
|
||||
|
||||
@staticmethod
|
||||
def list(bulk_compliance_frameworks: dict, provider: str = None) -> list[str]:
|
||||
"""
|
||||
Returns a list of compliance frameworks from bulk compliance frameworks
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
provider (str): The provider name
|
||||
|
||||
Returns:
|
||||
list: The list of compliance frameworks
|
||||
"""
|
||||
if provider:
|
||||
compliance_frameworks = [
|
||||
compliance_framework
|
||||
for compliance_framework in bulk_compliance_frameworks.keys()
|
||||
if provider in compliance_framework
|
||||
]
|
||||
else:
|
||||
compliance_frameworks = [
|
||||
compliance_framework
|
||||
for compliance_framework in bulk_compliance_frameworks.keys()
|
||||
]
|
||||
|
||||
return compliance_frameworks
|
||||
|
||||
@staticmethod
|
||||
def get(
|
||||
bulk_compliance_frameworks: dict, compliance_framework_name: str
|
||||
) -> "Compliance":
|
||||
"""
|
||||
Returns a compliance framework from bulk compliance frameworks
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
compliance_framework_name (str): The compliance framework name
|
||||
|
||||
Returns:
|
||||
Compliance: The compliance framework
|
||||
"""
|
||||
return bulk_compliance_frameworks.get(compliance_framework_name, None)
|
||||
|
||||
@staticmethod
|
||||
def list_requirements(
|
||||
bulk_compliance_frameworks: dict, compliance_framework: str = None
|
||||
) -> list:
|
||||
"""
|
||||
Returns a list of compliance requirements from a compliance framework
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
compliance_framework (str): The compliance framework name
|
||||
|
||||
Returns:
|
||||
list: The list of compliance requirements for the provided compliance framework
|
||||
"""
|
||||
compliance_requirements = []
|
||||
|
||||
if bulk_compliance_frameworks and compliance_framework:
|
||||
compliance_requirements = [
|
||||
compliance_requirement.Id
|
||||
for compliance_requirement in bulk_compliance_frameworks.get(
|
||||
compliance_framework
|
||||
).Requirements
|
||||
]
|
||||
|
||||
return compliance_requirements
|
||||
|
||||
@staticmethod
|
||||
def get_requirement(
|
||||
bulk_compliance_frameworks: dict, compliance_framework: str, requirement_id: str
|
||||
) -> Union[Mitre_Requirement, Compliance_Requirement]:
|
||||
"""
|
||||
Returns a compliance requirement from a compliance framework
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
compliance_framework (str): The compliance framework name
|
||||
requirement_id (str): The compliance requirement ID
|
||||
|
||||
Returns:
|
||||
Mitre_Requirement | Compliance_Requirement: The compliance requirement
|
||||
"""
|
||||
requirement = None
|
||||
for compliance_requirement in bulk_compliance_frameworks.get(
|
||||
compliance_framework
|
||||
).Requirements:
|
||||
if compliance_requirement.Id == requirement_id:
|
||||
requirement = compliance_requirement
|
||||
break
|
||||
|
||||
return requirement
|
||||
|
||||
@staticmethod
|
||||
def get_bulk(provider: str) -> dict:
|
||||
"""Bulk load all compliance frameworks specification into a dict"""
|
||||
try:
|
||||
bulk_compliance_frameworks = {}
|
||||
available_compliance_framework_modules = list_compliance_modules()
|
||||
for compliance_framework in available_compliance_framework_modules:
|
||||
if provider in compliance_framework.name:
|
||||
compliance_specification_dir_path = (
|
||||
f"{compliance_framework.module_finder.path}/{provider}"
|
||||
)
|
||||
# for compliance_framework in available_compliance_framework_modules:
|
||||
for filename in os.listdir(compliance_specification_dir_path):
|
||||
file_path = os.path.join(
|
||||
compliance_specification_dir_path, filename
|
||||
)
|
||||
# Check if it is a file and ti size is greater than 0
|
||||
if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
|
||||
# Open Compliance file in JSON
|
||||
# cis_v1.4_aws.json --> cis_v1.4_aws
|
||||
compliance_framework_name = filename.split(".json")[0]
|
||||
# Store the compliance info
|
||||
bulk_compliance_frameworks[compliance_framework_name] = (
|
||||
load_compliance_framework(file_path)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
|
||||
return bulk_compliance_frameworks
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_compliance_framework(
|
||||
compliance_specification_file: str,
|
||||
) -> ComplianceBaseModel:
|
||||
) -> Compliance:
|
||||
"""load_compliance_framework loads and parse a Compliance Framework Specification"""
|
||||
try:
|
||||
compliance_framework = ComplianceBaseModel.parse_file(
|
||||
compliance_specification_file
|
||||
)
|
||||
compliance_framework = Compliance.parse_file(compliance_specification_file)
|
||||
except ValidationError as error:
|
||||
logger.critical(
|
||||
f"Compliance Framework Specification from {compliance_specification_file} is not valid: {error}"
|
||||
|
||||
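Taken together, the new static helpers give Compliance a small query API over the bulk-loaded frameworks. A minimal usage sketch (the requirement ID is illustrative; the framework name follows the cis_v1.4_aws convention from the comment above):

from prowler.lib.check.compliance_models import Compliance

# Load every framework specification for a provider into a dict
bulk = Compliance.get_bulk(provider="aws")

# List the framework names that were loaded, e.g. "cis_v1.4_aws"
frameworks = Compliance.list(bulk, provider="aws")

# Fetch one framework and one of its requirements by ID
framework = Compliance.get(bulk, "cis_v1.4_aws")
requirement = Compliance.get_requirement(bulk, "cis_v1.4_aws", "1.1")  # "1.1" is an illustrative requirement ID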
@@ -3,7 +3,7 @@ import sys
import yaml
from jsonschema import validate

from prowler.config.config import valid_severities
from prowler.lib.check.models import Severity
from prowler.lib.logger import logger

custom_checks_metadata_schema = {
@@ -17,7 +17,7 @@ custom_checks_metadata_schema = {
"properties": {
"Severity": {
"type": "string",
"enum": valid_severities,
"enum": [severity.value for severity in Severity],
},
"CheckTitle": {
"type": "string",
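The schema hunk above replaces the config-level valid_severities list with values derived from the Severity enum. A self-contained sketch of the resulting validation behavior (payloads are illustrative):

from jsonschema import validate
from prowler.lib.check.models import Severity

schema = {
    "type": "object",
    "properties": {
        "Severity": {
            "type": "string",
            "enum": [severity.value for severity in Severity],
        },
    },
}

# Passes: "high" is a Severity member
validate(instance={"Severity": "high"}, schema=schema)

# Would raise jsonschema.ValidationError: "urgent" is not in the enum
# validate(instance={"Severity": "urgent"}, schema=schema)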
@@ -1,17 +1,30 @@
import functools
import os
import re
import sys
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
from typing import Set

from pydantic import BaseModel, ValidationError, validator

from prowler.config.config import valid_severities
from prowler.config.config import Provider
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.utils import recover_checks_from_provider
from prowler.lib.logger import logger


class Code(BaseModel):
"""Check's remediation information using IaC like CloudFormation, Terraform or the native CLI"""
"""
Represents the remediation code using IaC like CloudFormation, Terraform or the native CLI.

Attributes:
NativeIaC (str): The NativeIaC code.
Terraform (str): The Terraform code.
CLI (str): The CLI code.
Other (str): Other code.
"""

NativeIaC: str
Terraform: str
@@ -20,21 +33,69 @@ class Code(BaseModel):


class Recommendation(BaseModel):
"""Check's recommendation information"""
"""
Represents a recommendation.

Attributes:
Text (str): The text of the recommendation.
Url (str): The URL associated with the recommendation.
"""

Text: str
Url: str


class Remediation(BaseModel):
"""Check's remediation: Code and Recommendation"""
"""
Represents a remediation action for a specific check.

Attributes:
Code (Code): The code associated with the remediation action.
Recommendation (Recommendation): The recommendation for the remediation action.
"""

Code: Code
Recommendation: Recommendation


class Check_Metadata_Model(BaseModel):
"""Check Metadata Model"""
class Severity(str, Enum):
critical = "critical"
high = "high"
medium = "medium"
low = "low"
informational = "informational"


class CheckMetadata(BaseModel):
"""
Model representing the metadata of a check.

Attributes:
Provider (str): The provider of the check.
CheckID (str): The ID of the check.
CheckTitle (str): The title of the check.
CheckType (list[str]): The type of the check.
CheckAliases (list[str], optional): The aliases of the check. Defaults to an empty list.
ServiceName (str): The name of the service.
SubServiceName (str): The name of the sub-service.
ResourceIdTemplate (str): The template for the resource ID.
Severity (str): The severity of the check.
ResourceType (str): The type of the resource.
Description (str): The description of the check.
Risk (str): The risk associated with the check.
RelatedUrl (str): The URL related to the check.
Remediation (Remediation): The remediation steps for the check.
Categories (list[str]): The categories of the check.
DependsOn (list[str]): The dependencies of the check.
RelatedTo (list[str]): The related checks.
Notes (str): Additional notes for the check.
Compliance (list, optional): The compliance information for the check. Defaults to None.

Validators:
valid_category(value): Validator function to validate the categories of the check.
severity_to_lower(severity): Validator function to convert the severity to lowercase.
valid_severity(severity): Validator function to validate the severity of the check.
"""

Provider: str
CheckID: str
@@ -44,7 +105,7 @@ class Check_Metadata_Model(BaseModel):
ServiceName: str
SubServiceName: str
ResourceIdTemplate: str
Severity: str
Severity: Severity
ResourceType: str
Description: str
Risk: str
@@ -73,16 +134,244 @@ class Check_Metadata_Model(BaseModel):
def severity_to_lower(severity):
return severity.lower()

@validator("Severity")
def valid_severity(severity):
if severity not in valid_severities:
raise ValueError(
f"Invalid severity: {severity}. Severity must be one of {', '.join(valid_severities)}"
)
return severity
@staticmethod
def get_bulk(provider: str) -> dict[str, "CheckMetadata"]:
"""
Load the metadata of all checks for a given provider reading the check's metadata files.
Args:
provider (str): The name of the provider.
Returns:
dict[str, CheckMetadata]: A dictionary containing the metadata of all checks, with the CheckID as the key.
"""

bulk_check_metadata = {}
checks = recover_checks_from_provider(provider)
# Build list of check's metadata files
for check_info in checks:
# Build check path name
check_name = check_info[0]
check_path = check_info[1]
# Ignore fixer files
if check_name.endswith("_fixer"):
continue
# Append metadata file extension
metadata_file = f"{check_path}/{check_name}.metadata.json"
# Load metadata
check_metadata = load_check_metadata(metadata_file)
bulk_check_metadata[check_metadata.CheckID] = check_metadata

return bulk_check_metadata

@staticmethod
def list(
bulk_checks_metadata: dict = None,
bulk_compliance_frameworks: dict = None,
provider: str = None,
severity: str = None,
category: str = None,
service: str = None,
compliance_framework: str = None,
) -> Set["CheckMetadata"]:
"""
Returns a set of checks from the bulk checks metadata.

Args:
provider (str): The provider of the checks.
bulk_checks_metadata (dict): The bulk checks metadata.
bulk_compliance_frameworks (dict): The bulk compliance frameworks.
severity (str): The severity of the checks.
category (str): The category of the checks.
service (str): The service of the checks.
compliance_framework (str): The compliance framework of the checks.

Returns:
set: A set of checks.
"""
checks_from_provider = set()
checks_from_severity = set()
checks_from_category = set()
checks_from_service = set()
checks_from_compliance_framework = set()
# If the bulk checks metadata is not provided, get it
if not bulk_checks_metadata:
bulk_checks_metadata = {}
available_providers = [p.value for p in Provider]
for provider_name in available_providers:
bulk_checks_metadata.update(CheckMetadata.get_bulk(provider_name))
if provider:
checks_from_provider = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.Provider == provider
}
if severity:
checks_from_severity = CheckMetadata.list_by_severity(
bulk_checks_metadata=bulk_checks_metadata, severity=severity
)
if category:
checks_from_category = CheckMetadata.list_by_category(
bulk_checks_metadata=bulk_checks_metadata, category=category
)
if service:
checks_from_service = CheckMetadata.list_by_service(
bulk_checks_metadata=bulk_checks_metadata, service=service
)
if compliance_framework:
# Loaded here, as it is not always needed
if not bulk_compliance_frameworks:
bulk_compliance_frameworks = {}
available_providers = [p.value for p in Provider]
for provider in available_providers:
bulk_compliance_frameworks = Compliance.get_bulk(provider=provider)
checks_from_compliance_framework = (
CheckMetadata.list_by_compliance_framework(
bulk_compliance_frameworks=bulk_compliance_frameworks,
compliance_framework=compliance_framework,
)
)

# Get all the checks:
checks = set(bulk_checks_metadata.keys())
# Get the intersection of the checks
if len(checks_from_provider) > 0 or provider:
checks = checks & checks_from_provider
if len(checks_from_severity) > 0 or severity:
checks = checks & checks_from_severity
if len(checks_from_category) > 0 or category:
checks = checks & checks_from_category
if len(checks_from_service) > 0 or service:
checks = checks & checks_from_service
if len(checks_from_compliance_framework) > 0 or compliance_framework:
checks = checks & checks_from_compliance_framework

return checks

@staticmethod
def get(bulk_checks_metadata: dict, check_id: str) -> "CheckMetadata":
"""
Returns the check metadata from the bulk checks metadata.

Args:
bulk_checks_metadata (dict): The bulk checks metadata.
check_id (str): The check ID.

Returns:
CheckMetadata: The check metadata.
"""

return bulk_checks_metadata.get(check_id, None)

@staticmethod
def list_by_severity(bulk_checks_metadata: dict, severity: str = None) -> set:
"""
Returns a set of checks by severity from the bulk checks metadata.

Args:
bulk_checks_metadata (dict): The bulk checks metadata.
severity (str): The severity.

Returns:
set: A set of checks by severity.
"""
checks = set()

if severity:
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.Severity == severity
}

return checks

@staticmethod
def list_by_category(bulk_checks_metadata: dict, category: str = None) -> set:
"""
Returns a set of checks by category from the bulk checks metadata.

Args:
bulk_checks_metadata (dict): The bulk checks metadata.
category (str): The category.

Returns:
set: A set of checks by category.
"""
checks = set()

if category:
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if category in check_metadata.Categories
}

return checks

@staticmethod
def list_by_service(bulk_checks_metadata: dict, service: str = None) -> set:
"""
Returns a set of checks by service from the bulk checks metadata.

Args:
bulk_checks_metadata (dict): The bulk checks metadata.
service (str): The service.

Returns:
set: A set of checks by service.
"""
checks = set()

if service:
if service == "lambda":
service = "awslambda"
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.ServiceName == service
}

return checks

@staticmethod
def list_by_compliance_framework(
bulk_compliance_frameworks: dict, compliance_framework: str = None
) -> set:
"""
Returns a set of checks by compliance framework from the bulk compliance frameworks.

Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks.
compliance_framework (str): The compliance framework.

Returns:
set: A set of checks by compliance framework.
"""
checks = set()

if compliance_framework:
try:
checks_from_framework_list = [
requirement.Checks
for requirement in bulk_compliance_frameworks[
compliance_framework
].Requirements
]
# Flatten the nested lists of checks into a single list
checks_from_framework = functools.reduce(
lambda x, y: x + y, checks_from_framework_list
)
# Then union this list of checks with the initial one
checks = checks.union(checks_from_framework)
except Exception as e:
logger.error(
f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}"
)

return checks


class Check(ABC, Check_Metadata_Model):
class Check(ABC, CheckMetadata):
"""Prowler Check"""

def __init__(self, **data):
@@ -93,7 +382,7 @@ class Check(ABC, Check_Metadata_Model):
+ ".metadata.json"
)
# Store it to validate it with Pydantic
data = Check_Metadata_Model.parse_file(metadata_file).dict()
data = CheckMetadata.parse_file(metadata_file).dict()
# Call the parent's init function
super().__init__(**data)
# TODO: verify that the CheckID is the same as the filename and classname
@@ -114,14 +403,14 @@ class Check_Report:

status: str
status_extended: str
check_metadata: Check_Metadata_Model
check_metadata: CheckMetadata
resource_details: str
resource_tags: list
muted: bool

def __init__(self, metadata):
self.status = ""
self.check_metadata = Check_Metadata_Model.parse_raw(metadata)
self.check_metadata = CheckMetadata.parse_raw(metadata)
self.status_extended = ""
self.resource_details = ""
self.resource_tags = []
@@ -194,12 +483,22 @@ class Check_Report_Kubernetes(Check_Report):


# Testing Pending
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
"""load_check_metadata loads and parses a Check's metadata file"""
def load_check_metadata(metadata_file: str) -> CheckMetadata:
"""
Load check metadata from a file.
Args:
metadata_file (str): The path to the metadata file.
Returns:
CheckMetadata: The loaded check metadata.
Raises:
ValidationError: If the metadata file is not valid.
"""

try:
check_metadata = Check_Metadata_Model.parse_file(metadata_file)
check_metadata = CheckMetadata.parse_file(metadata_file)
except ValidationError as error:
logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
# TODO: remove this exit and raise an exception
sys.exit(1)
else:
return check_metadata

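The new CheckMetadata helpers mirror the Compliance ones: get_bulk loads every metadata file once, and list intersects the optional filters. A usage sketch (the check ID is illustrative):

from prowler.lib.check.models import CheckMetadata

# Load the metadata of every AWS check, keyed by CheckID
bulk_metadata = CheckMetadata.get_bulk(provider="aws")

# Intersect filters: only critical-severity checks of the s3 service
check_ids = CheckMetadata.list(
    bulk_checks_metadata=bulk_metadata,
    provider="aws",
    severity="critical",
    service="s3",
)

# Retrieve one check's full metadata (check ID is illustrative)
metadata = CheckMetadata.get(bulk_metadata, "s3_bucket_public_access")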
95
prowler/lib/check/utils.py
Normal file
@@ -0,0 +1,95 @@
import importlib
import sys
from pkgutil import walk_packages

from prowler.lib.logger import logger


def recover_checks_from_provider(
provider: str, service: str = None, include_fixers: bool = False
) -> list[tuple]:
"""
Recover all checks from the selected provider and service

Returns a list of tuples with the following format (check_name, check_path)
"""
try:
checks = []
modules = list_modules(provider, service)
for module_name in modules:
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
check_module_name = module_name.name
# We need to exclude common shared libraries in services
if (
check_module_name.count(".") == 6
and "lib" not in check_module_name
and (not check_module_name.endswith("_fixer") or include_fixers)
):
check_path = module_name.module_finder.path
# Check name is the last part of the check_module_name
check_name = check_module_name.split(".")[-1]
check_info = (check_name, check_path)
checks.append(check_info)
except ModuleNotFoundError:
logger.critical(f"Service {service} was not found for the {provider} provider.")
sys.exit(1)
except Exception as e:
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
sys.exit(1)
else:
return checks


# List all available modules in the selected provider and service
def list_modules(provider: str, service: str):
# This module path requires the full path including "prowler."
module_path = f"prowler.providers.{provider}.services"
if service:
module_path += f".{service}"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)


def recover_checks_from_service(service_list: list, provider: str) -> set:
"""
Recover all checks from the selected provider and service

Returns a set of checks from the given services
"""
try:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
service_checks = recover_checks_from_provider(provider, service)
if not service_checks:
logger.error(f"Service '{service}' does not have checks.")

else:
for check in service_checks:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check[0].split(".")[-1]
checks.add(check_name)
return checks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)


def list_compliance_modules():
"""
list_compliance_modules returns the available compliance frameworks and their paths
"""
# This module path requires the full path including "prowler."
module_path = "prowler.compliance"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
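For intuition, this is roughly what list_modules yields via pkgutil.walk_packages (the printed module name is illustrative):

import importlib
from pkgutil import walk_packages

# Same pattern as list_modules above: enumerate submodules under a package,
# prefixing each yielded name with the package path.
package = importlib.import_module("prowler.providers.aws.services")
for module_info in walk_packages(package.__path__, package.__name__ + "."):
    # module_info.name is e.g.
    # "prowler.providers.aws.services.s3.s3_bucket_public_access.s3_bucket_public_access"
    # (illustrative); module_info.module_finder.path is the directory on disk.
    print(module_info.name)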
@@ -10,9 +10,9 @@ from prowler.config.config import (
default_config_file_path,
default_fixer_config_file_path,
default_output_directory,
finding_statuses,
valid_severities,
)
from prowler.lib.check.models import Severity
from prowler.lib.outputs.common import Status
from prowler.providers.common.arguments import (
init_providers_parser,
validate_provider_arguments,
@@ -138,8 +138,8 @@ Detailed documentation at https://docs.prowler.com
common_outputs_parser.add_argument(
"--status",
nargs="+",
help=f"Filter by the status of the findings {finding_statuses}",
choices=finding_statuses,
help=f"Filter by the status of the findings {[status.value for status in Status]}",
choices=[status.value for status in Status],
)
common_outputs_parser.add_argument(
"--output-formats",
@@ -177,6 +177,12 @@ Detailed documentation at https://docs.prowler.com
common_outputs_parser.add_argument(
"--no-banner", "-b", action="store_true", help="Hide Prowler banner"
)
common_outputs_parser.add_argument(
"--no-color",
action="store_true",
help="Disable color codes in output",
)

common_outputs_parser.add_argument(
"--unix-timestamp",
action="store_true",
@@ -257,8 +263,8 @@ Detailed documentation at https://docs.prowler.com
"--severity",
"--severities",
nargs="+",
help=f"Severities to be executed {valid_severities}",
choices=valid_severities,
help=f"Severities to be executed {[severity.value for severity in Severity]}",
choices=[severity.value for severity in Severity],
)
group.add_argument(
"--compliance",
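Both argparse hunks apply the same pattern: derive help text and choices from the enum rather than from a parallel list in config. A minimal, self-contained sketch (parser and flag names are illustrative):

import argparse
from enum import Enum


class Severity(str, Enum):
    critical = "critical"
    high = "high"
    medium = "medium"
    low = "low"
    informational = "informational"


parser = argparse.ArgumentParser()
parser.add_argument(
    "--severity",
    nargs="+",
    # Deriving choices from the enum keeps the CLI and the model in sync
    choices=[severity.value for severity in Severity],
)
args = parser.parse_args(["--severity", "high", "critical"])
print(args.severity)  # ['high', 'critical']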
@@ -5,6 +5,8 @@ import yaml

from prowler.lib.logger import logger
from prowler.lib.mutelist.models import mutelist_schema
from prowler.lib.outputs.common import Status
from prowler.lib.outputs.utils import unroll_dict, unroll_tags


class Mutelist(ABC):
@@ -237,6 +239,35 @@ class Mutelist(ABC):
)
return False

def mute_finding(self, finding):
"""
Check if the provided finding is muted

Args:
finding (Finding): The finding to be evaluated for muting.

Returns:
Finding: The finding with the status updated if it is muted, otherwise the finding is returned unchanged

"""
try:
if self.is_muted(
finding.account_uid,
finding.metadata.CheckID,
finding.region,
finding.resource_uid,
unroll_dict(unroll_tags(finding.resource_tags)),
):
finding.raw["status"] = finding.status
finding.status = Status.MUTED
finding.muted = True
return finding
except Exception as error:
logger.error(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
return finding

def is_excepted(
self,
exceptions,
@@ -307,13 +338,16 @@ class Mutelist(ABC):
return False

@staticmethod
def is_item_matched(matched_items, finding_items, tag=False):
def is_item_matched(matched_items, finding_items, tag=False) -> bool:
"""
Check if any of the items in matched_items are present in finding_items.

Args:
matched_items (list): List of items to be matched.
finding_items (str): String to search for matched items.
tag (bool): If True, tag-matching semantics apply, since tags can be ANDed or ORed:
- AND logic -> True only if all the tags are present in the finding.
- OR logic -> True if any of the tags is present in the finding.

Returns:
bool: True if any of the matched_items are present in finding_items, otherwise False.
@@ -321,17 +355,19 @@ class Mutelist(ABC):
try:
is_item_matched = False
if matched_items and (finding_items or finding_items == ""):
# If we use tags, we need to use re.search instead of re.match because we need to match the tags in the format key1=value1 | key2=value2
if tag:
operation = re.search
else:
operation = re.match
is_item_matched = True
for item in matched_items:
if item.startswith("*"):
item = ".*" + item[1:]
if operation(item, finding_items):
is_item_matched = True
break
if tag:
if not re.search(item, finding_items):
is_item_matched = False
break
else:
if re.search(item, finding_items):
is_item_matched = True
break
return is_item_matched
except Exception as error:
logger.error(
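A small sketch of the ANDed tag semantics the docstring above describes, using the key1=value1 | key2=value2 serialization named in the comment (tag values are illustrative):

import re

# Finding tags are serialized as "key1=value1 | key2=value2"
finding_tags = "env=prod | team=security"

# AND semantics: every pattern must match somewhere in the tag string
and_patterns = ["env=prod", "team=security"]
assert all(re.search(pattern, finding_tags) for pattern in and_patterns)

# A single non-matching pattern fails the AND check
assert not all(re.search(pattern, finding_tags) for pattern in ["env=prod", "team=devops"])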
@@ -25,7 +25,6 @@ class ASFF(Output):
- transform(findings: list[Finding]) -> None: Transforms a list of findings into ASFF format.
- batch_write_data_to_file() -> None: Writes the findings data to a file in JSON ASFF format.
- generate_status(status: str, muted: bool = False) -> str: Generates the ASFF status based on the provided status and muted flag.
- format_resource_tags(tags: str) -> dict: Transforms a string of tags into a dictionary format.

References:
- AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
@@ -62,7 +61,6 @@ class ASFF(Output):
if finding.status == "MANUAL":
continue
timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
resource_tags = ASFF.format_resource_tags(finding.resource_tags)

associated_standards, compliance_summary = ASFF.format_compliance(
finding.compliance
@@ -70,36 +68,39 @@ class ASFF(Output):

# Ensures finding_status matches allowed values in ASFF
finding_status = ASFF.generate_status(finding.status, finding.muted)

self._data.append(
AWSSecurityFindingFormat(
# The following line cannot be changed because it is the format we use to generate unique findings for AWS Security Hub
# If changed, some findings could be lost because the unique identifier would be different
Id=f"prowler-{finding.check_id}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
Id=f"prowler-{finding.metadata.CheckID}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
ProductArn=f"arn:{finding.partition}:securityhub:{finding.region}::product/prowler/prowler",
ProductFields=ProductFields(
ProwlerResourceName=finding.resource_uid,
),
GeneratorId="prowler-" + finding.check_id,
GeneratorId="prowler-" + finding.metadata.CheckID,
AwsAccountId=finding.account_uid,
Types=(
finding.check_type.split(",")
if finding.check_type
finding.metadata.CheckType
if finding.metadata.CheckType
else ["Software and Configuration Checks"]
),
FirstObservedAt=timestamp,
UpdatedAt=timestamp,
CreatedAt=timestamp,
Severity=Severity(Label=finding.severity.value),
Title=finding.check_title,
Description=finding.description,
Severity=Severity(Label=finding.metadata.Severity.value),
Title=finding.metadata.CheckTitle,
Description=(
(finding.status_extended[:1000] + "...")
if len(finding.status_extended) > 1000
else finding.status_extended
),
Resources=[
Resource(
Id=finding.resource_uid,
Type=finding.resource_type,
Type=finding.metadata.ResourceType,
Partition=finding.partition,
Region=finding.region,
Tags=resource_tags,
Tags=finding.resource_tags,
)
],
Compliance=Compliance(
@@ -109,8 +110,8 @@ class ASFF(Output):
),
Remediation=Remediation(
Recommendation=Recommendation(
Text=finding.remediation_recommendation_text,
Url=finding.remediation_recommendation_url,
Text=finding.metadata.Remediation.Recommendation.Text,
Url=finding.metadata.Remediation.Recommendation.Url,
)
),
)
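A sketch of the stable finding identifier that the "cannot be changed" comment protects (hash_sha512 is assumed here to be a hex SHA-512 helper; all field values are illustrative):

import hashlib


def hash_sha512(value: str) -> str:
    # Assumed behavior of Prowler's hash_sha512 helper: hex digest of the input
    return hashlib.sha512(value.encode()).hexdigest()


finding_id = (
    f"prowler-s3_bucket_public_access-123456789012-eu-west-1-"
    f"{hash_sha512('arn:aws:s3:::example-bucket')}"
)
# The Id must stay byte-for-byte stable across runs; otherwise Security Hub
# would treat the same finding as a brand-new one.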
@@ -195,42 +196,6 @@ class ASFF(Output):

return json_asff_status

@staticmethod
def format_resource_tags(tags: str) -> dict:
"""
Transforms a string of tags into a dictionary format.

Parameters:
- tags (str): A string containing tags separated by ' | ' and key-value pairs separated by '='.

Returns:
- dict: A dictionary where keys are tag names and values are tag values.

Notes:
- If the input string is empty or None, it returns None.
- Each tag in the input string should be in the format 'key=value'.
- If the input string is not formatted correctly, it logs an error and returns None.
"""
try:
tags_dict = None
if tags:
tags = tags.split(" | ")
tags_dict = {}
for tag in tags:
value = tag.split("=")
tags_dict[value[0]] = value[1]
return tags_dict
except IndexError as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return None
except AttributeError as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return None

@staticmethod
def format_compliance(compliance: dict) -> tuple[list[dict], list[str]]:
"""
@@ -316,6 +281,12 @@ class Resource(BaseModel):
Region: str
Tags: Optional[dict]

@validator("Tags", pre=True, always=True)
def tags_cannot_be_empty_dict(tags):
if not tags:
return None
return tags


class Compliance(BaseModel):
"""

@@ -1,62 +1,26 @@
from operator import attrgetter
from enum import Enum

from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.utils import unroll_list, unroll_tags
from prowler.lib.outputs.utils import unroll_tags
from prowler.lib.utils.utils import outputs_unix_timestamp


def get_provider_data_mapping(provider) -> dict:
data = {}
for generic_field, provider_field in provider.get_output_mapping.items():
try:
provider_value = attrgetter(provider_field)(provider)
data[generic_field] = provider_value
except AttributeError:
data[generic_field] = ""
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return data


# TODO: add test for outputs_unix_timestamp
def fill_common_finding_data(finding: dict, unix_timestamp: bool) -> dict:
finding_data = {
"metadata": finding.check_metadata,
"timestamp": outputs_unix_timestamp(unix_timestamp, timestamp),
"check_id": finding.check_metadata.CheckID,
"check_title": finding.check_metadata.CheckTitle,
"check_type": ",".join(finding.check_metadata.CheckType),
"status": finding.status,
"status_extended": finding.status_extended,
"muted": finding.muted,
"service_name": finding.check_metadata.ServiceName,
"subservice_name": finding.check_metadata.SubServiceName,
"severity": finding.check_metadata.Severity,
"resource_type": finding.check_metadata.ResourceType,
"resource_details": finding.resource_details,
"resource_tags": unroll_tags(finding.resource_tags),
"description": finding.check_metadata.Description,
"risk": finding.check_metadata.Risk,
"related_url": finding.check_metadata.RelatedUrl,
"remediation_recommendation_text": (
finding.check_metadata.Remediation.Recommendation.Text
),
"remediation_recommendation_url": (
finding.check_metadata.Remediation.Recommendation.Url
),
"remediation_code_nativeiac": (
finding.check_metadata.Remediation.Code.NativeIaC
),
"remediation_code_terraform": (
finding.check_metadata.Remediation.Code.Terraform
),
"remediation_code_cli": (finding.check_metadata.Remediation.Code.CLI),
"remediation_code_other": (finding.check_metadata.Remediation.Code.Other),
"categories": unroll_list(finding.check_metadata.Categories),
"depends_on": unroll_list(finding.check_metadata.DependsOn),
"related_to": unroll_list(finding.check_metadata.RelatedTo),
"notes": finding.check_metadata.Notes,
}
return finding_data


class Status(str, Enum):
PASS = "PASS"
FAIL = "FAIL"
MANUAL = "MANUAL"
MUTED = "MUTED"
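A compact sketch of the attrgetter-driven mapping that get_provider_data_mapping performs (the provider object and mapping are illustrative stand-ins for a real Prowler provider):

from operator import attrgetter
from types import SimpleNamespace

# Illustrative provider object with a nested attribute
provider = SimpleNamespace(identity=SimpleNamespace(account="123456789012"))

# Mapping of generic output fields to dotted provider attribute paths
output_mapping = {"account_uid": "identity.account"}

data = {}
for generic_field, provider_field in output_mapping.items():
    try:
        # attrgetter("identity.account") resolves nested attributes
        data[generic_field] = attrgetter(provider_field)(provider)
    except AttributeError:
        data[generic_field] = ""

print(data)  # {'account_uid': '123456789012'}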
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.aws_well_architected.models import (
AWSWellArchitectedModel,
)
@@ -21,7 +21,7 @@ class AWSWellArchitected(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class AWSWellArchitected(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import AWSCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSCIS(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import AzureCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AzureCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AzureCIS(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import GCPCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GCPCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GCPCIS(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,6 +1,6 @@
from datetime import datetime

from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import KubernetesCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -21,7 +21,7 @@ class KubernetesCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class KubernetesCIS(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -7,6 +7,7 @@ from prowler.lib.outputs.compliance.ens.ens import get_ens_table
from prowler.lib.outputs.compliance.generic.generic_table import (
get_generic_compliance_table,
)
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp import get_kisa_ismsp_table
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack import (
get_mitre_attack_table,
)
@@ -62,6 +63,15 @@ def display_compliance_table(
output_directory,
compliance_overview,
)
elif "kisa_isms_" in compliance_framework:
get_kisa_ismsp_table(
findings,
bulk_checks_metadata,
compliance_framework,
output_filename,
output_directory,
compliance_overview,
)
else:
get_generic_compliance_table(
findings,

@@ -2,7 +2,7 @@ from csv import DictWriter
from pathlib import Path
from typing import List

from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
@@ -28,7 +28,7 @@ class ComplianceOutput(Output):
def __init__(
self,
findings: List[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
create_file_descriptor: bool = False,
file_path: str = None,
file_extension: str = "",

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.ens.models import AWSENSModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSENS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSENS(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.generic.models import GenericComplianceModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GenericCompliance(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GenericCompliance(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.iso27001.models import AWSISO27001Model
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSISO27001(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSISO27001(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

89
prowler/lib/outputs/compliance/kisa_ismsp/kisa_ismsp.py
Normal file
@@ -0,0 +1,89 @@
from colorama import Fore, Style
from tabulate import tabulate

from prowler.config.config import orange_color


def get_kisa_ismsp_table(
findings: list,
bulk_checks_metadata: dict,
compliance_framework: str,
output_filename: str,
output_directory: str,
compliance_overview: bool,
):
sections = {}
kisa_ismsp_compliance_table = {
"Provider": [],
"Section": [],
"Status": [],
"Muted": [],
}
pass_count = []
fail_count = []
muted_count = []
for index, finding in enumerate(findings):
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if (
compliance.Framework.startswith("KISA")
and compliance.Version in compliance_framework
):
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
section = attribute.Section
# Check if Section exists
if section not in sections:
sections[section] = {
"Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}",
"Muted": 0,
}
if finding.muted:
if index not in muted_count:
muted_count.append(index)
sections[section]["Muted"] += 1
else:
if finding.status == "FAIL" and index not in fail_count:
fail_count.append(index)
elif finding.status == "PASS" and index not in pass_count:
pass_count.append(index)

# Add results to table
sections = dict(sorted(sections.items()))
for section in sections:
kisa_ismsp_compliance_table["Provider"].append(compliance.Provider)
kisa_ismsp_compliance_table["Section"].append(section)
kisa_ismsp_compliance_table["Muted"].append(
f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}"
)
if len(fail_count) + len(pass_count) + len(muted_count) > 1:
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
if not compliance_overview:
print(
f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:"
)
print(
tabulate(
kisa_ismsp_compliance_table,
headers="keys",
tablefmt="rounded_grid",
)
)
print(
f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}"
)
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(
f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n"
)
93
prowler/lib/outputs/compliance/kisa_ismsp/kisa_ismsp_aws.py
Normal file
@@ -0,0 +1,93 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.kisa_ismsp.models import AWSKISAISMSPModel
from prowler.lib.outputs.finding import Finding


class AWSKISAISMSP(ComplianceOutput):
"""
This class represents the AWS KISA-ISMS-P compliance output.

Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

Methods:
- transform: Transforms findings into AWS KISA-ISMS-P compliance format.
"""

def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS KISA-ISMS-P compliance format.

Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AWSKISAISMSPModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Attributes_Domain=attribute.Domain,
Requirements_Attributes_Subdomain=attribute.Subdomain,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_AuditChecklist=attribute.AuditChecklist,
Requirements_Attributes_RelatedRegulations=attribute.RelatedRegulations,
Requirements_Attributes_AuditEvidence=attribute.AuditEvidence,
Requirements_Attributes_NonComplianceCases=attribute.NonComplianceCases,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSKISAISMSPModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Attributes_Domain=attribute.Domain,
Requirements_Attributes_Subdomain=attribute.Subdomain,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_AuditChecklist=attribute.AuditChecklist,
Requirements_Attributes_RelatedRegulations=attribute.RelatedRegulations,
Requirements_Attributes_AuditEvidence=attribute.AuditEvidence,
Requirements_Attributes_NonComplianceCases=attribute.NonComplianceCases,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)
31
prowler/lib/outputs/compliance/kisa_ismsp/models.py
Normal file
@@ -0,0 +1,31 @@
from typing import Optional

from pydantic import BaseModel


class AWSKISAISMSPModel(BaseModel):
"""
The AWS KISA-ISMS-P Model outputs findings in a format compliant with the AWS KISA-ISMS-P standard
"""

Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Name: str
Requirements_Description: str
Requirements_Attributes_Domain: str
Requirements_Attributes_Subdomain: str
Requirements_Attributes_Section: str
Requirements_Attributes_AuditChecklist: Optional[list[str]]
Requirements_Attributes_RelatedRegulations: Optional[list[str]]
Requirements_Attributes_AuditEvidence: Optional[list[str]]
Requirements_Attributes_NonComplianceCases: Optional[list[str]]
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
Muted: bool
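For reference, a hedged sketch of constructing one AWSKISAISMSPModel row, mirroring the manual-check fallback values used in the transform above (all framework-specific values are illustrative):

from prowler.lib.outputs.compliance.kisa_ismsp.models import AWSKISAISMSPModel

row = AWSKISAISMSPModel(
    Provider="aws",
    Description="KISA-ISMS-P compliance framework",  # illustrative
    AccountId="",
    Region="",
    AssessmentDate="2024-01-01 00:00:00",  # illustrative
    Requirements_Id="1.1.1",  # illustrative
    Requirements_Name="Management System",  # illustrative
    Requirements_Description="Illustrative requirement description",
    Requirements_Attributes_Domain="1. Management System",  # illustrative
    Requirements_Attributes_Subdomain="1.1",  # illustrative
    Requirements_Attributes_Section="1.1.1",  # illustrative
    Requirements_Attributes_AuditChecklist=None,  # Optional fields accept None
    Requirements_Attributes_RelatedRegulations=None,
    Requirements_Attributes_AuditEvidence=None,
    Requirements_Attributes_NonComplianceCases=None,
    Status="MANUAL",
    StatusExtended="Manual check",
    ResourceId="manual_check",
    ResourceName="Manual check",
    CheckId="manual",
    Muted=False,
)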
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AWSMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AWSMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AWSMitreAttack(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AzureMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AzureMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AzureMitreAttack(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import GCPMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class GCPMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class GCPMitreAttack(ComplianceOutput):

Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.

Returns:

@@ -1,4 +1,5 @@
from csv import DictWriter
from typing import List

from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
@@ -7,7 +8,7 @@ from prowler.lib.outputs.utils import unroll_dict, unroll_list


class CSV(Output):
    def transform(self, findings: list[Finding]) -> None:
    def transform(self, findings: List[Finding]) -> None:
        """Transforms the findings into the CSV format.

        Args:
@@ -16,11 +17,68 @@ class CSV(Output):
        """
        try:
            for finding in findings:
                finding_dict = {k.upper(): v for k, v in finding.dict().items()}
                finding_dict["COMPLIANCE"] = unroll_dict(finding.compliance)
                finding_dict["ACCOUNT_TAGS"] = unroll_list(finding.account_tags)
                finding_dict = {}
                finding_dict["AUTH_METHOD"] = finding.auth_method
                finding_dict["TIMESTAMP"] = finding.timestamp
                finding_dict["ACCOUNT_UID"] = finding.account_uid
                finding_dict["ACCOUNT_NAME"] = finding.account_name
                finding_dict["ACCOUNT_EMAIL"] = finding.account_email
                finding_dict["ACCOUNT_ORGANIZATION_UID"] = (
                    finding.account_organization_uid
                )
                finding_dict["ACCOUNT_ORGANIZATION_NAME"] = (
                    finding.account_organization_name
                )
                finding_dict["ACCOUNT_TAGS"] = unroll_dict(
                    finding.account_tags, separator=":"
                )
                finding_dict["FINDING_UID"] = finding.uid
                finding_dict["PROVIDER"] = finding.metadata.Provider
                finding_dict["CHECK_ID"] = finding.metadata.CheckID
                finding_dict["CHECK_TITLE"] = finding.metadata.CheckTitle
                finding_dict["CHECK_TYPE"] = unroll_list(finding.metadata.CheckType)
                finding_dict["STATUS"] = finding.status.value
                finding_dict["SEVERITY"] = finding.severity.value
                finding_dict["STATUS_EXTENDED"] = finding.status_extended
                finding_dict["MUTED"] = finding.muted
                finding_dict["SERVICE_NAME"] = finding.metadata.ServiceName
                finding_dict["SUBSERVICE_NAME"] = finding.metadata.SubServiceName
                finding_dict["SEVERITY"] = finding.metadata.Severity.value
                finding_dict["RESOURCE_TYPE"] = finding.metadata.ResourceType
                finding_dict["RESOURCE_UID"] = finding.resource_uid
                finding_dict["RESOURCE_NAME"] = finding.resource_name
                finding_dict["RESOURCE_DETAILS"] = finding.resource_details
                finding_dict["RESOURCE_TAGS"] = unroll_dict(finding.resource_tags)
                finding_dict["PARTITION"] = finding.partition
                finding_dict["REGION"] = finding.region
                finding_dict["DESCRIPTION"] = finding.metadata.Description
                finding_dict["RISK"] = finding.metadata.Risk
                finding_dict["RELATED_URL"] = finding.metadata.RelatedUrl
                finding_dict["REMEDIATION_RECOMMENDATION_TEXT"] = (
                    finding.metadata.Remediation.Recommendation.Text
                )
                finding_dict["REMEDIATION_RECOMMENDATION_URL"] = (
                    finding.metadata.Remediation.Recommendation.Url
                )
                finding_dict["REMEDIATION_CODE_NATIVEIAC"] = (
                    finding.metadata.Remediation.Code.NativeIaC
                )
                finding_dict["REMEDIATION_CODE_TERRAFORM"] = (
                    finding.metadata.Remediation.Code.Terraform
                )
                finding_dict["REMEDIATION_CODE_CLI"] = (
                    finding.metadata.Remediation.Code.CLI
                )
                finding_dict["REMEDIATION_CODE_OTHER"] = (
                    finding.metadata.Remediation.Code.Other
                )
                finding_dict["COMPLIANCE"] = unroll_dict(
                    finding.compliance, separator=": "
                )
                finding_dict["CATEGORIES"] = unroll_list(finding.metadata.Categories)
                finding_dict["DEPENDS_ON"] = unroll_list(finding.metadata.DependsOn)
                finding_dict["RELATED_TO"] = unroll_list(finding.metadata.RelatedTo)
                finding_dict["NOTES"] = finding.metadata.Notes
                finding_dict["PROWLER_VERSION"] = finding.prowler_version
                self._data.append(finding_dict)
        except Exception as error:
            logger.error(
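
The hunk above pins the CSV column set by building each row explicitly instead of upper-casing the model dump wholesale. A minimal sketch of how rows like these can then be serialized with csv.DictWriter (the standalone function, file name, and ";" delimiter are illustrative assumptions, not the class's actual batch-write code):

    from csv import DictWriter

    def write_rows(rows: list[dict], path: str = "findings.csv") -> None:
        # All rows share the same keys, so the header comes from the first row.
        with open(path, "w", newline="") as csv_file:
            writer = DictWriter(csv_file, fieldnames=list(rows[0].keys()), delimiter=";")
            writer.writeheader()
            writer.writerows(rows)
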
@@ -1,34 +1,17 @@
from datetime import datetime
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel
from pydantic import BaseModel, Field

from prowler.config.config import prowler_version
from prowler.lib.check.models import Check_Report
from prowler.lib.check.models import Check_Report, CheckMetadata
from prowler.lib.logger import logger
from prowler.lib.outputs.common import (
    fill_common_finding_data,
    get_provider_data_mapping,
)
from prowler.lib.outputs.common import Status, fill_common_finding_data
from prowler.lib.outputs.compliance.compliance import get_check_compliance
from prowler.lib.utils.utils import dict_to_lowercase, get_nested_attribute
from prowler.providers.common.provider import Provider


class Status(str, Enum):
    PASS = "PASS"
    FAIL = "FAIL"
    MANUAL = "MANUAL"


class Severity(str, Enum):
    critical = "critical"
    high = "high"
    medium = "medium"
    low = "low"
    informational = "informational"


class Finding(BaseModel):
    """
    Represents the output model for a finding across different providers.
@@ -41,81 +24,130 @@ class Finding(BaseModel):
    auth_method: str
    timestamp: Union[int, datetime]
    account_uid: str
    # Optional since it depends on permissions
    account_name: Optional[str]
    # Optional since it depends on permissions
    account_email: Optional[str]
    # Optional since it depends on permissions
    account_organization_uid: Optional[str]
    # Optional since it depends on permissions
    account_organization_name: Optional[str]
    # Optional since it depends on permissions
    account_tags: Optional[list[str]]
    finding_uid: str
    provider: str
    check_id: str
    check_title: str
    check_type: str
    account_name: Optional[str] = None
    account_email: Optional[str] = None
    account_organization_uid: Optional[str] = None
    account_organization_name: Optional[str] = None
    metadata: CheckMetadata
    account_tags: dict = {}
    uid: str
    status: Status
    status_extended: str
    muted: bool = False
    service_name: str
    subservice_name: str
    severity: Severity
    resource_type: str
    resource_uid: str
    resource_name: str
    resource_details: str
    resource_tags: str
    # Only present for AWS and Azure
    partition: Optional[str]
    resource_tags: dict = Field(default_factory=dict)
    partition: Optional[str] = None
    region: str
    description: str
    risk: str
    related_url: str
    remediation_recommendation_text: str
    remediation_recommendation_url: str
    remediation_code_nativeiac: str
    remediation_code_terraform: str
    remediation_code_cli: str
    remediation_code_other: str
    compliance: dict
    categories: str
    depends_on: str
    related_to: str
    notes: str
    prowler_version: str = prowler_version
    raw: dict = Field(default_factory=dict)

    @property
    def provider(self) -> str:
        """
        Returns the provider from the finding check's metadata.
        """
        return self.metadata.Provider

    @property
    def check_id(self) -> str:
        """
        Returns the ID from the finding check's metadata.
        """
        return self.metadata.CheckID

    @property
    def severity(self) -> str:
        """
        Returns the severity from the finding check's metadata.
        """
        return self.metadata.Severity

    @property
    def resource_type(self) -> str:
        """
        Returns the resource type from the finding check's metadata.
        """
        return self.metadata.ResourceType

    @property
    def service_name(self) -> str:
        """
        Returns the service name from the finding check's metadata.
        """
        return self.metadata.ServiceName

    def get_metadata(self) -> dict:
        """
        Retrieves the metadata of the object and returns it as a dictionary with all keys in lowercase.
        Returns:
            dict: A dictionary containing the metadata with keys converted to lowercase.
        """

        return dict_to_lowercase(self.metadata.dict())

    @classmethod
    def generate_output(
        cls, provider: Provider, check_output: Check_Report
        cls, provider: Provider, check_output: Check_Report, output_options
    ) -> "Finding":
        """Generates the output for a finding based on the provider and output options

        Args:
            provider (Provider): the provider object
            check_output (Check_Report): the check output object
            output_options: the output options object, depending on the provider
        Returns:
            finding_output (Finding): the finding output object

        """
        output_options = provider.output_options
        # TODO: think about get_provider_data_mapping
        provider_data_mapping = get_provider_data_mapping(provider)
        # TODO: move fill_common_finding_data
        common_finding_data = fill_common_finding_data(
            check_output, output_options.unix_timestamp
        )
        unix_timestamp = False
        if hasattr(output_options, "unix_timestamp"):
            unix_timestamp = output_options.unix_timestamp

        common_finding_data = fill_common_finding_data(check_output, unix_timestamp)
        output_data = {}
        output_data.update(provider_data_mapping)
        output_data.update(common_finding_data)

        bulk_checks_metadata = {}
        if hasattr(output_options, "bulk_checks_metadata"):
            bulk_checks_metadata = output_options.bulk_checks_metadata

        output_data["compliance"] = get_check_compliance(
            check_output, provider.type, output_options.bulk_checks_metadata
            check_output, provider.type, bulk_checks_metadata
        )
        try:
            output_data["provider"] = provider.type

            if provider.type == "aws":
                output_data["account_uid"] = get_nested_attribute(
                    provider, "identity.account"
                )
                output_data["account_name"] = get_nested_attribute(
                    provider, "organizations_metadata.account_name"
                )
                output_data["account_email"] = get_nested_attribute(
                    provider, "organizations_metadata.account_email"
                )
                output_data["account_organization_uid"] = get_nested_attribute(
                    provider, "organizations_metadata.organization_arn"
                )
                output_data["account_organization_name"] = get_nested_attribute(
                    provider, "organizations_metadata.organization_id"
                )
                output_data["account_tags"] = get_nested_attribute(
                    provider, "organizations_metadata.account_tags"
                )
                output_data["partition"] = get_nested_attribute(
                    provider, "identity.partition"
                )

                # TODO: probably the Organization UID is without the account id
                output_data["auth_method"] = f"profile: {output_data['auth_method']}"
                output_data["auth_method"] = (
                    f"profile: {get_nested_attribute(provider, 'identity.profile')}"
                )
                output_data["resource_name"] = check_output.resource_id
                output_data["resource_uid"] = check_output.resource_arn
                output_data["region"] = check_output.region
@@ -126,9 +158,9 @@ class Finding(BaseModel):
                    f"{provider.identity.identity_type}: {provider.identity.identity_id}"
                )
                # Get the first tenant domain ID, just in case
                output_data["account_organization_uid"] = output_data[
                    "account_organization_uid"
                ][0]
                output_data["account_organization_uid"] = get_nested_attribute(
                    provider, "identity.tenant_ids"
                )[0]
                output_data["account_uid"] = (
                    output_data["account_organization_uid"]
                    if "Tenant:" in check_output.subscription
@@ -138,15 +170,33 @@ class Finding(BaseModel):
                output_data["resource_name"] = check_output.resource_name
                output_data["resource_uid"] = check_output.resource_id
                output_data["region"] = check_output.location
                # TODO: check the tenant_ids
                # TODO: we have to get the account organization; the tenant is not that
                output_data["account_organization_name"] = get_nested_attribute(
                    provider, "identity.tenant_domain"
                )

                output_data["partition"] = get_nested_attribute(
                    provider, "region_config.name"
                )
                # TODO: pending to get the subscription tags
                # "account_tags": "organizations_metadata.account_details_tags",
                # TODO: store subscription_name + id pairs
                # "account_name": "organizations_metadata.account_details_name",
                # "account_email": "organizations_metadata.account_details_email",

            elif provider.type == "gcp":
                output_data["auth_method"] = f"Principal: {output_data['auth_method']}"
                output_data["auth_method"] = (
                    f"Principal: {get_nested_attribute(provider, 'identity.profile')}"
                )
                output_data["account_uid"] = provider.projects[
                    check_output.project_id
                ].id
                output_data["account_name"] = provider.projects[
                    check_output.project_id
                ].name
                # There is no concept of a project email in GCP
                # "account_email": "organizations_metadata.account_details_email",
                output_data["account_tags"] = provider.projects[
                    check_output.project_id
                ].labels
@@ -165,7 +215,7 @@ class Finding(BaseModel):
                    check_output.project_id
                ].organization.id
                # TODO: for now this is None since we don't retrieve that data
                output_data["account_organization"] = provider.projects[
                output_data["account_organization_name"] = provider.projects[
                    check_output.project_id
                ].organization.display_name

@@ -177,12 +227,15 @@ class Finding(BaseModel):
                output_data["resource_name"] = check_output.resource_name
                output_data["resource_uid"] = check_output.resource_id
                output_data["account_name"] = f"context: {provider.identity.context}"
                output_data["account_uid"] = get_nested_attribute(
                    provider, "identity.cluster"
                )
                output_data["region"] = f"namespace: {check_output.namespace}"

            # check_output unique ID
            # TODO: move this to a function
            # TODO: in Azure, GCP and K8s there are findings without resource_name
            output_data["finding_uid"] = (
            output_data["uid"] = (
                f"prowler-{provider.type}-{check_output.check_metadata.CheckID}-{output_data['account_uid']}-"
                f"{output_data['region']}-{output_data['resource_name']}"
            )
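
The refactor above stops storing provider, check_id, severity, resource_type and service_name as separate fields and derives them from the embedded CheckMetadata instead. A stripped-down sketch of that metadata-backed property pattern, using hypothetical stand-in classes rather than the real CheckMetadata/Finding (requires pydantic):

    from pydantic import BaseModel

    class _Metadata(BaseModel):  # hypothetical stand-in for CheckMetadata
        Provider: str
        CheckID: str

    class _Finding(BaseModel):  # hypothetical stand-in for Finding
        metadata: _Metadata

        @property
        def provider(self) -> str:
            # Derived from the metadata instead of being duplicated as a field
            return self.metadata.Provider

    finding = _Finding(metadata=_Metadata(Provider="aws", CheckID="s3_bucket_public_access"))
    assert finding.provider == "aws"

This keeps a single source of truth, so the derived fields can no longer drift from the check's metadata.
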
@@ -39,17 +39,17 @@ class HTML(Output):
                    f"""
                        <tr class="{row_class}">
                            <td>{finding_status}</td>
                            <td>{finding.severity.value}</td>
                            <td>{finding.service_name}</td>
                            <td>{finding.metadata.Severity.value}</td>
                            <td>{finding.metadata.ServiceName}</td>
                            <td>{finding.region.lower()}</td>
                            <td>{finding.check_id.replace("_", "<wbr />_")}</td>
                            <td>{finding.check_title}</td>
                            <td>{finding.metadata.CheckID.replace("_", "<wbr />_")}</td>
                            <td>{finding.metadata.CheckTitle}</td>
                            <td>{finding.resource_uid.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr />_")}</td>
                            <td>{parse_html_string(finding.resource_tags)}</td>
                            <td>{parse_html_string(unroll_dict(finding.resource_tags))}</td>
                            <td>{finding.status_extended.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr />_")}</td>
                            <td><p class="show-read-more">{html.escape(finding.risk)}</p></td>
                            <td><p class="show-read-more">{html.escape(finding.remediation_recommendation_text)}</p> <a class="read-more" href="{finding.remediation_recommendation_url}"><i class="fas fa-external-link-alt"></i></a></td>
                            <td><p class="show-read-more">{parse_html_string(unroll_dict(finding.compliance))}</p></td>
                            <td><p class="show-read-more">{html.escape(finding.metadata.Risk)}</p></td>
                            <td><p class="show-read-more">{html.escape(finding.metadata.Remediation.Recommendation.Text)}</p> <a class="read-more" href="{finding.metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
                            <td><p class="show-read-more">{parse_html_string(unroll_dict(finding.compliance, separator=": "))}</p></td>
                        </tr>
                    """
                )
@@ -173,9 +173,15 @@ class HTML(Output):
                        <li class="list-group-item">
                            <b>Passed:</b> {str(stats.get("total_pass", 0))}
                        </li>
                        <li class="list-group-item">
                            <b>Passed (Muted):</b> {str(stats.get("total_muted_pass", 0))}
                        </li>
                        <li class="list-group-item">
                            <b>Failed:</b> {str(stats.get("total_fail", 0))}
                        </li>
                        <li class="list-group-item">
                            <b>Failed (Muted):</b> {str(stats.get("total_muted_fail", 0))}
                        </li>
                        <li class="list-group-item">
                            <b>Total Resources:</b> {str(stats.get("resources_count", 0))}
                        </li>
@@ -9,7 +9,6 @@ from py_ocsf_models.events.findings.detection_finding import (
from py_ocsf_models.events.findings.finding import ActivityID, FindingInformation
from py_ocsf_models.objects.account import Account, TypeID
from py_ocsf_models.objects.cloud import Cloud
from py_ocsf_models.objects.container import Container
from py_ocsf_models.objects.group import Group
from py_ocsf_models.objects.metadata import Metadata
from py_ocsf_models.objects.organization import Organization
@@ -20,6 +19,7 @@ from py_ocsf_models.objects.resource_details import ResourceDetails
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
from prowler.lib.outputs.utils import unroll_dict_to_list


class OCSF(Output):
@@ -36,7 +36,7 @@ class OCSF(Output):
    - transform(findings: List[Finding]) -> None: Transforms the findings into the OCSF Detection Finding format.
    - batch_write_data_to_file() -> None: Writes the findings to a file using the OCSF Detection Finding format using the `Output._file_descriptor`.
    - get_account_type_id_by_provider(provider: str) -> TypeID: Returns the TypeID based on the provider.
    - get_finding_status_id(status: str, muted: bool) -> StatusID: Returns the StatusID based on the status and muted values.
    - get_finding_status_id(muted: bool) -> StatusID: Returns the StatusID based on the muted value.

    References:
        - OCSF: https://schema.ocsf.io/1.2.0/classes/detection_finding
@@ -53,37 +53,42 @@ class OCSF(Output):
            for finding in findings:
                finding_activity = ActivityID.Create
                cloud_account_type = self.get_account_type_id_by_provider(
                    finding.provider
                    finding.metadata.Provider
                )
                finding_severity = getattr(
                    SeverityID, finding.severity.capitalize(), SeverityID.Unknown
                )
                finding_status = self.get_finding_status_id(
                    finding.status, finding.muted
                    SeverityID,
                    finding.metadata.Severity.capitalize(),
                    SeverityID.Unknown,
                )
                finding_status = self.get_finding_status_id(finding.muted)

                detection_finding = DetectionFinding(
                    message=finding.status_extended,
                    activity_id=finding_activity.value,
                    activity_name=finding_activity.name,
                    finding_info=FindingInformation(
                        created_time=finding.timestamp,
                        desc=finding.description,
                        title=finding.check_title,
                        uid=finding.finding_uid,
                        created_time_dt=finding.timestamp,
                        created_time=int(finding.timestamp.timestamp()),
                        desc=finding.metadata.Description,
                        title=finding.metadata.CheckTitle,
                        uid=finding.uid,
                        name=finding.resource_name,
                        product_uid="prowler",
                        types=finding.metadata.CheckType,
                    ),
                    event_time=finding.timestamp,
                    time_dt=finding.timestamp,
                    time=int(finding.timestamp.timestamp()),
                    remediation=Remediation(
                        desc=finding.remediation_recommendation_text,
                        desc=finding.metadata.Remediation.Recommendation.Text,
                        references=list(
                            filter(
                                None,
                                [
                                    finding.remediation_code_nativeiac,
                                    finding.remediation_code_terraform,
                                    finding.remediation_code_cli,
                                    finding.remediation_code_other,
                                    finding.remediation_recommendation_url,
                                    finding.metadata.Remediation.Code.NativeIaC,
                                    finding.metadata.Remediation.Code.Terraform,
                                    finding.metadata.Remediation.Code.CLI,
                                    finding.metadata.Remediation.Code.Other,
                                    finding.metadata.Remediation.Recommendation.Url,
                                ],
                            )
                        ),
                    ),
@@ -94,65 +99,73 @@ class OCSF(Output):
                    status=finding_status.name,
                    status_code=finding.status,
                    status_detail=finding.status_extended,
                    risk_details=finding.risk,
                    resources=[
                        ResourceDetails(
                            # TODO: Check labels for other providers
                            labels=(
                                finding.resource_tags.split(",")
                                if finding.resource_tags
                                else []
                            ),
                            name=finding.resource_name,
                            uid=finding.resource_uid,
                            group=Group(name=finding.service_name),
                            type=finding.resource_type,
                            # TODO: this should be included only if using the Cloud profile
                            cloud_partition=finding.partition,
                            region=finding.region,
                            data={"details": finding.resource_details},
                        )
                    ],
                    risk_details=finding.metadata.Risk,
                    resources=(
                        [
                            ResourceDetails(
                                labels=unroll_dict_to_list(finding.resource_tags),
                                name=finding.resource_name,
                                uid=finding.resource_uid,
                                group=Group(name=finding.metadata.ServiceName),
                                type=finding.metadata.ResourceType,
                                # TODO: this should be included only if using the Cloud profile
                                cloud_partition=finding.partition,
                                region=finding.region,
                                data={"details": finding.resource_details},
                            )
                        ]
                        if finding.metadata.Provider != "kubernetes"
                        else [
                            ResourceDetails(
                                labels=unroll_dict_to_list(finding.resource_tags),
                                name=finding.resource_name,
                                uid=finding.resource_uid,
                                group=Group(name=finding.metadata.ServiceName),
                                type=finding.metadata.ResourceType,
                                data={"details": finding.resource_details},
                                namespace=finding.region.replace("namespace: ", ""),
                            )
                        ]
                    ),
                    metadata=Metadata(
                        event_code=finding.check_id,
                        event_code=finding.metadata.CheckID,
                        product=Product(
                            uid="prowler",
                            name="Prowler",
                            vendor_name="Prowler",
                            version=finding.prowler_version,
                        ),
                        profiles=(
                            ["cloud", "datetime"]
                            if finding.metadata.Provider != "kubernetes"
                            else ["container", "datetime"]
                        ),
                        tenant_uid=finding.account_organization_uid,
                    ),
                    type_uid=DetectionFindingTypeID.Create,
                    type_name=DetectionFindingTypeID.Create.name,
                    type_name=f"Detection Finding: {DetectionFindingTypeID.Create.name}",
                    unmapped={
                        "check_type": finding.check_type,
                        "related_url": finding.related_url,
                        "categories": finding.categories,
                        "depends_on": finding.depends_on,
                        "related_to": finding.related_to,
                        "notes": finding.notes,
                        "related_url": finding.metadata.RelatedUrl,
                        "categories": finding.metadata.Categories,
                        "depends_on": finding.metadata.DependsOn,
                        "related_to": finding.metadata.RelatedTo,
                        "notes": finding.metadata.Notes,
                        "compliance": finding.compliance,
                    },
                )

                if finding.provider == "kubernetes":
                    detection_finding.container = Container(
                        name=finding.resource_name,
                        uid=finding.resource_uid,
                    )
                    # TODO: Get the PID of the namespace (we only have the name of the namespace)
                    # detection_finding.namespace_pid=,
                else:
                if finding.provider != "kubernetes":
                    detection_finding.cloud = Cloud(
                        account=Account(
                            name=finding.account_name,
                            type_id=cloud_account_type.value,
                            type=cloud_account_type.name,
                            type=cloud_account_type.name.replace("_", " "),
                            uid=finding.account_uid,
                            labels=finding.account_tags,
                            labels=unroll_dict_to_list(finding.account_tags),
                        ),
                        org=Organization(
                            uid=finding.account_organization_uid,
                            name=finding.account_organization_name,
                            # TODO: add the org unit id and name
                        ),
                        provider=finding.provider,
                        region=finding.region,
@@ -212,20 +225,17 @@ class OCSF(Output):
        return type_id

    @staticmethod
    def get_finding_status_id(status: str, muted: bool) -> StatusID:
    def get_finding_status_id(muted: bool) -> StatusID:
        """
        Returns the StatusID based on the status and muted values.
        Returns the StatusID based on the muted value.

        Args:
            status (str): The status value
            muted (bool): The muted value

        Returns:
            StatusID: The StatusID based on the status and muted values
            StatusID: The StatusID based on the muted value
        """
        status_id = StatusID.Other
        if status == "FAIL":
            status_id = StatusID.New
        status_id = StatusID.New
        if muted:
            status_id = StatusID.Suppressed
        return status_id
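
With this change the OCSF status no longer encodes PASS/FAIL, which moves to status_code above; the StatusID only tracks suppression. A minimal restatement of the rule, assuming StatusID comes from the py_ocsf_models import shown in the hunk header:

    from py_ocsf_models.events.findings.detection_finding import StatusID  # assumed location

    def status_for(muted: bool) -> StatusID:
        # Every finding is reported as New; muted findings become Suppressed.
        return StatusID.Suppressed if muted else StatusID.New
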
@@ -25,10 +25,12 @@ def stdout_report(finding, color, verbose, status, fix):
    )


# TODO: Only pass check_findings, provider.output_options and provider.type
def report(check_findings, provider):
# TODO: Only pass check_findings, output_options and provider.type
def report(check_findings, provider, output_options):
    try:
        output_options = provider.output_options
        verbose = False
        if hasattr(output_options, "verbose"):
            verbose = output_options.verbose
        if check_findings:
            # TO-DO Generic Function
            if provider.type == "aws":
@@ -39,21 +41,27 @@ def report(check_findings, provider):

            for finding in check_findings:
                # Print findings by stdout
                status = []
                if hasattr(output_options, "status"):
                    status = output_options.status
                fixer = False
                if hasattr(output_options, "fixer"):
                    fixer = output_options.fixer
                color = set_report_color(finding.status, finding.muted)
                stdout_report(
                    finding,
                    color,
                    output_options.verbose,
                    output_options.status,
                    output_options.fixer,
                    verbose,
                    status,
                    fixer,
                )

        else:  # No service resources in the whole account
            color = set_report_color("MANUAL")
            if output_options.verbose:
            if verbose:
                print(f"\t{color}INFO{Style.RESET_ALL} There are no resources")
        # Separator between findings and bar
        if output_options.verbose:
        if verbose:
            print()
    except Exception as error:
        logger.error(
@@ -82,35 +90,82 @@ def extract_findings_statistics(findings: list) -> dict:
    extract_findings_statistics takes a list of findings and returns the following dict with the aggregated statistics
    {
        "total_pass": 0,
        "total_muted_pass": 0,
        "total_fail": 0,
        "total_muted_fail": 0,
        "resources_count": 0,
        "findings_count": 0,
        "critical_failed_findings": [],
        "critical_passed_findings": [],
        "all_fails_are_muted": False
    }
    """
    logger.info("Extracting audit statistics...")
    stats = {}
    total_pass = 0
    total_fail = 0
    muted_pass = 0
    muted_fail = 0
    resources = set()
    findings_count = 0
    all_fails_are_muted = True
    critical_severity_pass = 0
    critical_severity_fail = 0
    high_severity_pass = 0
    high_severity_fail = 0
    medium_severity_pass = 0
    medium_severity_fail = 0
    low_severity_pass = 0
    low_severity_fail = 0

    for finding in findings:
        # Save the resource_id
        resources.add(finding.resource_id)

        if finding.status == "PASS":
            if finding.check_metadata.Severity == "critical":
                critical_severity_pass += 1
            if finding.check_metadata.Severity == "high":
                high_severity_pass += 1
            if finding.check_metadata.Severity == "medium":
                medium_severity_pass += 1
            if finding.check_metadata.Severity == "low":
                low_severity_pass += 1
            total_pass += 1
            findings_count += 1
            if finding.muted is True:
                muted_pass += 1

        if finding.status == "FAIL":
            if finding.check_metadata.Severity == "critical":
                critical_severity_fail += 1
            if finding.check_metadata.Severity == "high":
                high_severity_fail += 1
            if finding.check_metadata.Severity == "medium":
                medium_severity_fail += 1
            if finding.check_metadata.Severity == "low":
                low_severity_fail += 1
            total_fail += 1
            findings_count += 1
            if finding.muted is True:
                muted_fail += 1
            if not finding.muted and all_fails_are_muted:
                all_fails_are_muted = False

    stats["total_pass"] = total_pass
    stats["total_muted_pass"] = muted_pass
    stats["total_fail"] = total_fail
    stats["total_muted_fail"] = muted_fail
    stats["resources_count"] = len(resources)
    stats["findings_count"] = findings_count
    stats["total_critical_severity_fail"] = critical_severity_fail
    stats["total_critical_severity_pass"] = critical_severity_pass
    stats["total_high_severity_fail"] = high_severity_fail
    stats["total_high_severity_pass"] = high_severity_pass
    stats["total_medium_severity_fail"] = medium_severity_fail
    stats["total_medium_severity_pass"] = medium_severity_pass
    stats["total_low_severity_fail"] = low_severity_fail
    stats["total_low_severity_pass"] = low_severity_pass
    stats["all_fails_are_muted"] = all_fails_are_muted

    return stats
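
A quick illustrative exercise of the aggregation above, using throwaway stubs rather than real Check_Report objects (the import path is an assumption):

    from types import SimpleNamespace

    from prowler.lib.outputs.outputs import extract_findings_statistics  # assumed path

    findings = [
        SimpleNamespace(
            status="FAIL", muted=False, resource_id="r1",
            check_metadata=SimpleNamespace(Severity="critical"),
        ),
        SimpleNamespace(
            status="PASS", muted=True, resource_id="r2",
            check_metadata=SimpleNamespace(Severity="low"),
        ),
    ]

    stats = extract_findings_statistics(findings)
    assert stats["total_critical_severity_fail"] == 1
    assert stats["total_muted_pass"] == 1
    assert stats["all_fails_are_muted"] is False
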
prowler/lib/outputs/slack/exceptions/exceptions.py (new file, 61 lines)
@@ -0,0 +1,61 @@
from prowler.exceptions.exceptions import ProwlerException


# Exception codes from 8000 to 8999 are reserved for Slack exceptions
class SlackBaseException(ProwlerException):
    """Base class for Slack errors."""

    SLACK_ERROR_CODES = {
        (8000, "SlackClientError"): {
            "message": "Slack ClientError occurred",
            "remediation": "Check your Slack client configuration and permissions.",
        },
        (8001, "SlackNoCredentialsError"): {
            "message": "Invalid Slack credentials found",
            "remediation": "Some aspect of authentication cannot be validated. Either the provided token is invalid or the request originates from an IP address disallowed from making the request.",
        },
        (8002, "SlackChannelNotFound"): {
            "message": "Slack channel not found",
            "remediation": "Check the channel name and ensure it exists.",
        },
    }

    def __init__(self, code, file=None, original_exception=None, message=None):
        error_info = self.SLACK_ERROR_CODES.get((code, self.__class__.__name__))
        if message:
            # Copy before overriding so the class-level defaults are not mutated
            error_info = {**error_info, "message": message}
        super().__init__(
            code,
            source="Slack",
            file=file,
            original_exception=original_exception,
            error_info=error_info,
        )


class SlackCredentialsError(SlackBaseException):
    """Base class for Slack credentials errors."""

    def __init__(self, code, file=None, original_exception=None, message=None):
        super().__init__(code, file, original_exception, message)


class SlackClientError(SlackCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            8000, file=file, original_exception=original_exception, message=message
        )


class SlackNoCredentialsError(SlackCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            8001, file=file, original_exception=original_exception, message=message
        )


class SlackChannelNotFound(SlackCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            8002, file=file, original_exception=original_exception, message=message
        )
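
The base class resolves its default message and remediation by looking up the (code, class name) pair, so each subclass only passes its code. A hedged usage sketch (the import path follows the new file's location):

    from prowler.lib.outputs.slack.exceptions.exceptions import SlackChannelNotFound

    try:
        # message overrides the default "Slack channel not found" text
        raise SlackChannelNotFound(file="sketch.py", message="channel_not_found")
    except SlackChannelNotFound as error:
        print(error)  # carries code 8002, source "Slack" and the remediation hint
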
@@ -1,3 +1,4 @@
import os
from typing import Any

from slack_sdk import WebClient
@@ -5,6 +6,12 @@ from slack_sdk.web.base_client import SlackResponse

from prowler.config.config import aws_logo, azure_logo, gcp_logo, square_logo_img
from prowler.lib.logger import logger
from prowler.lib.outputs.slack.exceptions.exceptions import (
    SlackChannelNotFound,
    SlackClientError,
    SlackNoCredentialsError,
)
from prowler.providers.common.models import Connection


class Slack:
@@ -43,6 +50,7 @@ class Slack:
                username="Prowler",
                icon_url=square_logo_img,
                channel=f"#{self.channel}",
                text="Prowler Scan Summary",
                blocks=self.__create_message_blocks__(identity, logo, stats, args),
            )
            return response
@@ -50,7 +58,6 @@ class Slack:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            return error

    def __create_message_identity__(self, provider: Any):
        """
@@ -121,6 +128,19 @@ class Slack:
                    "text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass'] / stats['findings_count'] * 100, 2)}%)\n",
                },
            },
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": (
                        "*Severities:*\n"
                        f"• *Critical:* {stats['total_critical_severity_pass']} "
                        f"• *High:* {stats['total_high_severity_pass']} "
                        f"• *Medium:* {stats['total_medium_severity_pass']} "
                        f"• *Low:* {stats['total_low_severity_pass']}"
                    ),
                },
            },
            {
                "type": "section",
                "text": {
@@ -128,6 +148,19 @@ class Slack:
                    "text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail'] / stats['findings_count'] * 100, 2)}%)\n ",
                },
            },
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": (
                        "*Severities:*\n"
                        f"• *Critical:* {stats['total_critical_severity_fail']} "
                        f"• *High:* {stats['total_high_severity_fail']} "
                        f"• *Medium:* {stats['total_medium_severity_fail']} "
                        f"• *Low:* {stats['total_low_severity_fail']}"
                    ),
                },
            },
            {
                "type": "section",
                "text": {
@@ -204,3 +237,66 @@ class Slack:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    @staticmethod
    def test_connection(
        token: str,
        channel: str,
        raise_on_exception: bool = True,
    ) -> Connection:
        """
        Test the Slack connection by validating the provided token and channel.

        Args:
            token (str): The Slack token to be tested.
            channel (str): The Slack channel to be validated.

        Returns:
            Connection: A Connection object.
        """
        try:
            client = WebClient(token=token)
            # Test if the token is valid
            auth_response = client.auth_test()
            if auth_response["ok"]:
                # Test if the channel is accessible
                channels_response = client.conversations_info(
                    token=token, channel=channel
                )
                if channels_response["ok"]:
                    return Connection(is_connected=True)
                else:
                    exception = SlackChannelNotFound(
                        file=os.path.basename(__file__),
                        message=(
                            channels_response["error"]
                            if "error" in channels_response
                            else "Unknown error"
                        ),
                    )
                    if raise_on_exception:
                        raise exception
                    return Connection(error=exception)
            else:
                exception = SlackNoCredentialsError(
                    file=os.path.basename(__file__),
                    message=(
                        auth_response["error"]
                        if "error" in auth_response
                        else "Unknown error"
                    ),
                )
                if raise_on_exception:
                    raise exception
                return Connection(error=exception)

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            if raise_on_exception:
                raise SlackClientError(
                    file=os.path.basename(__file__),
                    original_exception=error,
                ) from error
            return Connection(error=error)
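
A hedged usage sketch of the connection test above; the token and channel values are placeholders and the import path is assumed:

    from prowler.lib.outputs.slack.slack import Slack  # assumed path

    connection = Slack.test_connection(
        token="xoxb-...",          # placeholder bot token
        channel="C0123456789",     # placeholder channel ID
        raise_on_exception=False,  # return the error in a Connection instead of raising
    )
    if connection.is_connected:
        print("Slack is reachable")
    else:
        print(f"Slack test failed: {connection.error}")
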
@@ -1,4 +1,24 @@
def unroll_list(listed_items: list, separator: str = "|"):
def unroll_list(listed_items: list, separator: str = "|") -> str:
    """
    Unrolls a list of items into a single string, separated by a specified separator.

    Args:
        listed_items (list): The list of items to be unrolled.
        separator (str, optional): The separator to be used between the items. Defaults to "|".

    Returns:
        str: The unrolled string.

    Examples:
        >>> unroll_list(['apple', 'banana', 'orange'])
        'apple | banana | orange'

        >>> unroll_list(['apple', 'banana', 'orange'], separator=',')
        'apple, banana, orange'

        >>> unroll_list([])
        ''
    """
    unrolled_items = ""
    if listed_items:
        for item in listed_items:
@@ -13,70 +33,138 @@ def unroll_list(listed_items: list, separator: str = "|"):
    return unrolled_items


def unroll_tags(tags: list):
def unroll_tags(tags: list) -> dict:
    """
    Unrolls a list of tags into a dictionary.

    Args:
        tags (list): A list of tags.

    Returns:
        dict: A dictionary containing the unrolled tags.

    Examples:
        >>> tags = [{"key": "name", "value": "John"}, {"key": "age", "value": "30"}]
        >>> unroll_tags(tags)
        {'name': 'John', 'age': '30'}

        >>> tags = [{"Key": "name", "Value": "John"}, {"Key": "age", "Value": "30"}]
        >>> unroll_tags(tags)
        {'name': 'John', 'age': '30'}

        >>> tags = [{"key": "name"}]
        >>> unroll_tags(tags)
        {'name': ''}

        >>> tags = [{"Key": "name"}]
        >>> unroll_tags(tags)
        {'name': ''}

        >>> tags = [{"name": "John", "age": "30"}]
        >>> unroll_tags(tags)
        {'name': 'John', 'age': '30'}

        >>> tags = []
        >>> unroll_tags(tags)
        {}

        >>> tags = {"name": "John", "age": "30"}
        >>> unroll_tags(tags)
        {'name': 'John', 'age': '30'}

        >>> tags = ["name", "age"]
        >>> unroll_tags(tags)
        {'name': '', 'age': ''}
    """
    if tags and tags != [{}] and tags != [None]:
        if isinstance(tags, dict):
            return tags
        if len(tags) > 0 and isinstance(tags[0], str):
            return {tag: "" for tag in tags}
        if "key" in tags[0]:
            return {item["key"]: item.get("value", "") for item in tags}
        elif "Key" in tags[0]:
            return {item["Key"]: item.get("Value", "") for item in tags}
        else:
            return {key: value for d in tags for key, value in d.items()}
    return {}


def unroll_dict(dict: dict, separator: str = "=") -> str:
    """
    Unrolls a dictionary into a string representation.

    Args:
        dict (dict): The dictionary to be unrolled.
        separator (str, optional): The separator placed between each key and its value. Defaults to "=".

    Returns:
        str: The unrolled string representation of the dictionary.

    Examples:
        >>> my_dict = {'name': 'John', 'age': 30, 'hobbies': ['reading', 'coding']}
        >>> unroll_dict(my_dict)
        'name: John | age: 30 | hobbies: reading, coding'
    """

    unrolled_items = ""
    separator = "|"
    if tags and tags != [{}] and tags != [None]:
        for item in tags:
            # Check if there are tags in the list
            if isinstance(item, dict):
                for key, value in item.items():
                    if not unrolled_items:
                        # Check the pattern of tags (Key:Value or Key:key/Value:value)
                        if "Key" != key and "Value" != key:
                            unrolled_items = f"{key}={value}"
                        else:
                            if "Key" == key:
                                unrolled_items = f"{value}="
                            else:
                                unrolled_items = f"{value}"
                    else:
                        if "Key" != key and "Value" != key:
                            unrolled_items = (
                                f"{unrolled_items} {separator} {key}={value}"
                            )
                        else:
                            if "Key" == key:
                                unrolled_items = (
                                    f"{unrolled_items} {separator} {value}="
                                )
                            else:
                                unrolled_items = f"{unrolled_items}{value}"
            elif not unrolled_items:
                unrolled_items = f"{item}"
            else:
                unrolled_items = f"{unrolled_items} {separator} {item}"

    return unrolled_items


def unroll_dict(dict: dict):
    unrolled_items = ""
    separator = "|"
    for key, value in dict.items():
        if isinstance(value, list):
            value = ", ".join(value)
        if not unrolled_items:
            unrolled_items = f"{key}: {value}"
            unrolled_items = f"{key}{separator}{value}"
        else:
            unrolled_items = f"{unrolled_items} {separator} {key}: {value}"
            unrolled_items = f"{unrolled_items} | {key}{separator}{value}"

    return unrolled_items


def unroll_dict_to_list(dict: dict):
def unroll_dict_to_list(dict: dict) -> list:
    """
    Unrolls a dictionary into a list of key-value pairs.

    Args:
        dict (dict): The dictionary to be unrolled.

    Returns:
        list: A list of key-value pairs, where each pair is represented as a string.

    Examples:
        >>> my_dict = {'name': 'John', 'age': 30, 'hobbies': ['reading', 'coding']}
        >>> unroll_dict_to_list(my_dict)
        ['name: John', 'age: 30', 'hobbies: reading, coding']
    """

    dict_list = []
    for key, value in dict.items():
        if isinstance(value, list):
            value = ", ".join(value)
            dict_list.append(f"{key}: {value}")
            dict_list.append(f"{key}:{value}")
        else:
            dict_list.append(f"{key}: {value}")
            dict_list.append(f"{key}:{value}")

    return dict_list


def parse_json_tags(tags: list):
def parse_json_tags(tags: list) -> dict[str, str]:
    """
    Parses a list of JSON tags and returns a dictionary of key-value pairs.

    Args:
        tags (list): A list of JSON tags.

    Returns:
        dict: A dictionary containing the parsed key-value pairs from the tags.

    Examples:
        >>> tags = [
        ...     {"Key": "Name", "Value": "John"},
        ...     {"Key": "Age", "Value": "30"},
        ...     {"Key": "City", "Value": "New York"}
        ... ]
        >>> parse_json_tags(tags)
        {'Name': 'John', 'Age': '30', 'City': 'New York'}
    """

    dict_tags = {}
    if tags and tags != [{}] and tags != [None]:
        for tag in tags:
@@ -88,7 +176,23 @@ def parse_json_tags(tags: list):
    return dict_tags


def parse_html_string(str: str):
def parse_html_string(str: str) -> str:
    """
    Parses a string and returns a formatted HTML string.

    This function takes an input string and splits it using the delimiter " | ".
    It then formats each element of the split string as a bullet point in HTML format.

    Args:
        str (str): The input string to be parsed.

    Returns:
        str: The formatted HTML string.

    Example:
        >>> parse_html_string("item1 | item2 | item3")
        '\n•item1\n\n•item2\n\n•item3\n'
    """
    string = ""
    for elem in str.split(" | "):
        if elem:
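
The helpers above compose; this sketch's behavior is taken from their doctests and from the new ":" join in unroll_dict_to_list (import path assumed):

    from prowler.lib.outputs.utils import unroll_dict_to_list, unroll_tags  # assumed path

    tags = [{"Key": "env", "Value": "prod"}, {"Key": "team", "Value": "sec"}]
    normalized = unroll_tags(tags)            # {'env': 'prod', 'team': 'sec'}
    labels = unroll_dict_to_list(normalized)  # ['env:prod', 'team:sec']
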
prowler/lib/scan/exceptions/exceptions.py (new file, 88 lines)
@@ -0,0 +1,88 @@
from prowler.exceptions.exceptions import ProwlerException


# Exception codes from 5000 to 5999 are reserved for Scan exceptions
class ScanBaseException(ProwlerException):
    """Base class for Scan errors."""

    SCAN_ERROR_CODES = {
        (5000, "ScanInvalidSeverityError"): {
            "message": "Invalid severity level provided.",
            "remediation": "Please provide a valid severity level. Valid severities are: critical, high, medium, low, informational.",
        },
        (5001, "ScanInvalidCheckError"): {
            "message": "Invalid check provided.",
            "remediation": "Please provide a valid check name.",
        },
        (5002, "ScanInvalidServiceError"): {
            "message": "Invalid service provided.",
            "remediation": "Please provide a valid service name.",
        },
        (5003, "ScanInvalidComplianceFrameworkError"): {
            "message": "Invalid compliance framework provided.",
            "remediation": "Please provide a valid compliance framework name for the chosen provider.",
        },
        (5004, "ScanInvalidCategoryError"): {
            "message": "Invalid category provided.",
            "remediation": "Please provide a valid category name.",
        },
        (5005, "ScanInvalidStatusError"): {
            "message": "Invalid status provided.",
            "remediation": "Please provide a valid status: FAIL, PASS, MANUAL.",
        },
    }

    def __init__(self, code, file=None, original_exception=None, message=None):
        module = "Scan"
        error_info = self.SCAN_ERROR_CODES.get((code, self.__class__.__name__))
        if message:
            # Copy before overriding so the class-level defaults are not mutated
            error_info = {**error_info, "message": message}
        super().__init__(
            code=code,
            source=module,
            file=file,
            original_exception=original_exception,
            error_info=error_info,
        )


class ScanInvalidSeverityError(ScanBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            5000, file=file, original_exception=original_exception, message=message
        )


class ScanInvalidCheckError(ScanBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            5001, file=file, original_exception=original_exception, message=message
        )


class ScanInvalidServiceError(ScanBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            5002, file=file, original_exception=original_exception, message=message
        )


class ScanInvalidComplianceFrameworkError(ScanBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            5003, file=file, original_exception=original_exception, message=message
        )


class ScanInvalidCategoryError(ScanBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            5004, file=file, original_exception=original_exception, message=message
        )


class ScanInvalidStatusError(ScanBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            5005, file=file, original_exception=original_exception, message=message
        )
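
The hunk below replaces the module-level scan() function with a Scan class whose scan() method is a generator. A hedged usage sketch, with assumed import paths and provider standing for an already-initialized Provider:

    from prowler.lib.scan.exceptions.exceptions import ScanInvalidSeverityError  # assumed path
    from prowler.lib.scan.scan import Scan  # assumed path

    try:
        scan = Scan(provider, severities=["critical", "high"])
    except ScanInvalidSeverityError as error:
        raise SystemExit(f"Bad severity filter: {error}")

    for progress, findings in scan.scan():
        # Each iteration yields the overall progress and the batch of Finding outputs
        print(f"{progress:.1f}% complete, {len(findings)} findings in this batch")
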
@@ -1,60 +1,379 @@
|
||||
from typing import Any
|
||||
import datetime
|
||||
from typing import Generator
|
||||
|
||||
from prowler.lib.check.check import execute
|
||||
from prowler.lib.check.models import Check_Report
|
||||
from prowler.lib.check.check import (
|
||||
execute,
|
||||
import_check,
|
||||
list_services,
|
||||
update_audit_metadata,
|
||||
)
|
||||
from prowler.lib.check.checks_loader import load_checks_to_execute
|
||||
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.check.models import CheckMetadata, Severity
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.common import Status
|
||||
from prowler.lib.outputs.finding import Finding
|
||||
from prowler.lib.scan.exceptions.exceptions import (
|
||||
ScanInvalidCategoryError,
|
||||
ScanInvalidCheckError,
|
||||
ScanInvalidComplianceFrameworkError,
|
||||
ScanInvalidServiceError,
|
||||
ScanInvalidSeverityError,
|
||||
ScanInvalidStatusError,
|
||||
)
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from prowler.providers.common.provider import Provider
|
||||
|
||||
|
||||
def scan(
|
||||
checks_to_execute: list,
|
||||
global_provider: Any,
|
||||
custom_checks_metadata: Any,
|
||||
) -> list[Check_Report]:
|
||||
try:
|
||||
# List to store all the check's findings
|
||||
all_findings = []
|
||||
# Services and checks executed for the Audit Status
|
||||
services_executed = set()
|
||||
checks_executed = set()
|
||||
class Scan:
|
||||
_provider: Provider
|
||||
# Refactor(Core): This should replace the Audit_Metadata
|
||||
_number_of_checks_to_execute: int = 0
|
||||
_number_of_checks_completed: int = 0
|
||||
# TODO the str should be a set of Check objects
|
||||
_checks_to_execute: list[str]
|
||||
_service_checks_to_execute: dict[str, set[str]]
|
||||
_service_checks_completed: dict[str, set[str]]
|
||||
_progress: float = 0.0
|
||||
_findings: list = []
|
||||
_duration: int = 0
|
||||
_status: list[str] = None
|
||||
|
||||
# Initialize the Audit Metadata
|
||||
# TODO: this should be done in the provider class
|
||||
# Refactor(Core): Audit manager?
|
||||
global_provider.audit_metadata = Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=checks_to_execute,
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
)
|
||||
def __init__(
|
||||
self,
|
||||
provider: Provider,
|
||||
checks: list[str] = None,
|
||||
services: list[str] = None,
|
||||
compliances: list[str] = None,
|
||||
categories: list[str] = None,
|
||||
severities: list[str] = None,
|
||||
excluded_checks: list[str] = None,
|
||||
excluded_services: list[str] = None,
|
||||
status: list[str] = None,
|
||||
):
|
||||
"""
|
||||
Scan is the class that executes the checks and yields the progress and the findings.
|
||||
|
||||
for check_name in checks_to_execute:
|
||||
Params:
|
||||
provider: Provider -> The provider to scan
|
||||
checks: list[str] -> The checks to execute
|
||||
services: list[str] -> The services to scan
|
||||
compliances: list[str] -> The compliances to check
|
||||
categories: list[str] -> The categories of the checks
|
||||
severities: list[str] -> The severities of the checks
|
||||
excluded_checks: list[str] -> The checks to exclude
|
||||
excluded_services: list[str] -> The services to exclude
|
||||
status: list[str] -> The status of the checks
|
||||
|
||||
Raises:
|
||||
ScanInvalidCheckError: If the check does not exist in the provider or is from another provider.
|
||||
ScanInvalidServiceError: If the service does not exist in the provider.
|
||||
ScanInvalidComplianceFrameworkError: If the compliance framework does not exist in the provider.
|
||||
ScanInvalidCategoryError: If the category does not exist in the provider.
|
||||
ScanInvalidSeverityError: If the severity does not exist in the provider.
|
||||
ScanInvalidStatusError: If the status does not exist in the provider.
|
||||
"""
|
||||
self._provider = provider
|
||||
|
||||
# Validate the status
|
||||
if status:
|
||||
try:
|
||||
# Recover service from check name
|
||||
service = check_name.split("_")[0]
|
||||
for s in status:
|
||||
Status(s)
|
||||
if not self._status:
|
||||
self._status = []
|
||||
if s not in self._status:
|
||||
self._status.append(s)
|
||||
except ValueError:
|
||||
raise ScanInvalidStatusError(f"Invalid status provided: {s}.")
|
||||
|
||||
check_findings = execute(
|
||||
service,
|
||||
check_name,
|
||||
global_provider,
|
||||
services_executed,
|
||||
checks_executed,
|
||||
custom_checks_metadata,
|
||||
)
|
||||
all_findings.extend(check_findings)
|
||||
# Load bulk compliance frameworks
|
||||
bulk_compliance_frameworks = Compliance.get_bulk(provider.type)
|
||||
|
||||
# If check does not exists in the provider or is from another provider
|
||||
except ModuleNotFoundError:
|
||||
logger.error(
|
||||
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
# Get bulk checks metadata for the provider
|
||||
bulk_checks_metadata = CheckMetadata.get_bulk(provider.type)
|
||||
# Complete checks metadata with the compliance framework specification
|
||||
bulk_checks_metadata = update_checks_metadata_with_compliance(
|
||||
bulk_compliance_frameworks, bulk_checks_metadata
|
||||
)
|
||||
|
||||
return all_findings
|
||||
# Create a list of valid categories
|
||||
valid_categories = set()
|
||||
for check, metadata in bulk_checks_metadata.items():
|
||||
for category in metadata.Categories:
|
||||
if category not in valid_categories:
|
||||
valid_categories.add(category)
|
||||
|
||||
# Validate checks
|
||||
if checks:
|
||||
for check in checks:
|
||||
if check not in bulk_checks_metadata.keys():
|
||||
raise ScanInvalidCheckError(f"Invalid check provided: {check}.")
|
||||
|
||||
# Validate services
|
||||
if services:
|
||||
for service in services:
|
||||
if service not in list_services(provider.type):
|
||||
raise ScanInvalidServiceError(
|
||||
f"Invalid service provided: {service}."
|
||||
)
|
||||
|
||||
# Validate compliances
|
||||
if compliances:
|
||||
for compliance in compliances:
|
||||
if compliance not in bulk_compliance_frameworks.keys():
|
||||
raise ScanInvalidComplianceFrameworkError(
|
||||
f"Invalid compliance provided: {compliance}."
|
||||
)
|
||||
|
||||
# Validate categories
|
||||
if categories:
|
||||
for category in categories:
|
||||
if category not in valid_categories:
|
||||
raise ScanInvalidCategoryError(
|
||||
f"Invalid category provided: {category}."
|
||||
)
|
||||
|
||||
# Validate severity
|
||||
if severities:
|
||||
for severity in severities:
|
||||
try:
|
||||
Severity(severity)
|
||||
except ValueError:
|
||||
raise ScanInvalidSeverityError(
|
||||
f"Invalid severity provided: {severity}."
|
||||
)
|
||||
|
||||
# Load checks to execute
|
||||
self._checks_to_execute = sorted(
|
||||
load_checks_to_execute(
|
||||
bulk_checks_metadata=bulk_checks_metadata,
|
||||
bulk_compliance_frameworks=bulk_compliance_frameworks,
|
||||
check_list=checks,
|
||||
service_list=services,
|
||||
compliance_frameworks=compliances,
|
||||
categories=categories,
|
||||
severities=severities,
|
||||
provider=provider.type,
|
||||
checks_file=None,
|
||||
)
|
||||
)
|
||||
|
||||
# Exclude checks
|
||||
if excluded_checks:
|
||||
for check in excluded_checks:
|
||||
if check in self._checks_to_execute:
|
||||
self._checks_to_execute.remove(check)
|
||||
else:
|
||||
raise ScanInvalidCheckError(
|
||||
f"Invalid check provided: {check}. Check does not exist in the provider."
|
||||
)
|
||||
|
||||
# Exclude services
|
||||
if excluded_services:
|
||||
for check in self._checks_to_execute:
|
||||
if get_service_name_from_check_name(check) in excluded_services:
|
||||
self._checks_to_execute.remove(check)
|
||||
else:
|
||||
raise ScanInvalidServiceError(
|
||||
f"Invalid service provided: {check}. Service does not exist in the provider."
|
||||
)
|
||||
|
||||
self._number_of_checks_to_execute = len(self._checks_to_execute)
|
||||
|
||||
service_checks_to_execute = get_service_checks_to_execute(
|
||||
self._checks_to_execute
|
||||
)
|
||||
service_checks_completed = dict()
|
||||
|
||||
self._service_checks_to_execute = service_checks_to_execute
|
||||
self._service_checks_completed = service_checks_completed
|
||||
|
||||
@property
|
||||
def checks_to_execute(self) -> list[str]:
|
||||
return self._checks_to_execute
|
||||
|
||||
@property
|
||||
def service_checks_to_execute(self) -> dict[str, set[str]]:
|
||||
return self._service_checks_to_execute
|
||||
|
||||
@property
|
||||
def service_checks_completed(self) -> dict[str, set[str]]:
|
||||
return self._service_checks_completed
|
||||
|
||||
@property
|
||||
def provider(self) -> Provider:
|
||||
return self._provider
|
||||
|
||||
@property
|
||||
def progress(self) -> float:
|
||||
return (
|
||||
self._number_of_checks_completed / self._number_of_checks_to_execute * 100
|
||||
)
|
||||
|
||||
@property
|
||||
def duration(self) -> int:
|
||||
return self._duration
|
||||
|
||||
@property
|
||||
def findings(self) -> list:
|
||||
return self._findings
|
||||
|
    def scan(
        self,
        custom_checks_metadata: dict = {},
    ) -> Generator[tuple[float, list[Finding]], None, None]:
        """
        Executes the scan by iterating over the checks to execute and executing each check.
        Yields the progress and findings for each check.

        Args:
            custom_checks_metadata (dict): Custom metadata for the checks (default: {}).

        Yields:
            Tuple[float, list[Finding]]: A tuple containing the progress and findings for each check.

        Raises:
            ModuleNotFoundError: If the check does not exist in the provider or is from another provider.
            Exception: If any other error occurs during the execution of a check.
        """
        try:
            checks_to_execute = self.checks_to_execute
            # Initialize the Audit Metadata
            # TODO: this should be done in the provider class
            # Refactor(Core): Audit manager?
            self._provider.audit_metadata = Audit_Metadata(
                services_scanned=0,
                expected_checks=checks_to_execute,
                completed_checks=0,
                audit_progress=0,
            )

            start_time = datetime.datetime.now()

            for check_name in checks_to_execute:
                try:
                    # Recover service from check name
                    service = get_service_name_from_check_name(check_name)
                    try:
                        # Import check module
                        check_module_path = f"prowler.providers.{self._provider.type}.services.{service}.{check_name}.{check_name}"
                        lib = import_check(check_module_path)
                        # Recover functions from check
                        check_to_execute = getattr(lib, check_name)
                        check = check_to_execute()
                    except ModuleNotFoundError:
                        logger.error(
                            f"Check '{check_name}' was not found for the {self._provider.type.upper()} provider"
                        )
                        continue
                    # Execute the check
                    check_findings = execute(
                        check,
                        self._provider,
                        custom_checks_metadata,
                        output_options=None,
                    )

                    # Filter the findings by the status
                    if self._status:
                        check_findings = [
                            finding
                            for finding in check_findings
                            if finding.status in self._status
                        ]

                    # Store findings
                    self._findings.extend(check_findings)

                    # Remove the executed check
                    self._service_checks_to_execute[service].remove(check_name)
                    if len(self._service_checks_to_execute[service]) == 0:
                        self._service_checks_to_execute.pop(service, None)
                    # Add the completed check
                    if service not in self._service_checks_completed:
                        self._service_checks_completed[service] = set()
                    self._service_checks_completed[service].add(check_name)
                    self._number_of_checks_completed += 1

                    # This should be done only once all the service's checks are completed
                    # This metadata needs to get to the services not within the provider
                    # since it is present in the Scan class
                    self._provider.audit_metadata = update_audit_metadata(
                        self._provider.audit_metadata,
                        self.get_completed_services(),
                        self.get_completed_checks(),
                    )

                    findings = [
                        Finding.generate_output(
                            self._provider, finding, output_options=None
                        )
                        for finding in check_findings
                    ]

                    yield self.progress, findings
                # If the check does not exist in the provider or is from another provider
                except ModuleNotFoundError:
                    logger.error(
                        f"Check '{check_name}' was not found for the {self._provider.type.upper()} provider"
                    )
                except Exception as error:
                    logger.error(
                        f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                    )
            # Update the scan duration when all checks are completed
            self._duration = int((datetime.datetime.now() - start_time).total_seconds())
        except Exception as error:
            logger.error(
                f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    def get_completed_services(self) -> set[str]:
        """
        get_completed_services returns the services that have been completed.

        Example:
            get_completed_services() -> {"ec2", "s3"}
        """
        return set(self._service_checks_completed.keys())

    def get_completed_checks(self) -> set[str]:
        """
        get_completed_checks returns the checks that have been completed.

        Example:
            get_completed_checks() -> {"ec2_instance_public_ip", "s3_bucket_public"}
        """
        completed_checks = set()
        for checks in self._service_checks_completed.values():
            completed_checks.update(checks)
        return completed_checks


def get_service_name_from_check_name(check_name: str) -> str:
    """
    get_service_name_from_check_name returns the service name for a given check name.

    Example:
        get_service_name_from_check_name("ec2_instance_public") -> "ec2"
    """
    return check_name.split("_")[0]


def get_service_checks_to_execute(checks_to_execute: set[str]) -> dict[str, set[str]]:
    """
    get_service_checks_to_execute returns a dictionary with the services and the checks to execute.

    Example:
        get_service_checks_to_execute({"accessanalyzer_enabled", "ec2_instance_public_ip"})
        -> {"accessanalyzer": {"accessanalyzer_enabled"}, "ec2": {"ec2_instance_public_ip"}}
    """
    service_checks_to_execute = dict()
    for check in checks_to_execute:
        # check -> accessanalyzer_enabled
        # service -> accessanalyzer
        service = get_service_name_from_check_name(check)
        if service not in service_checks_to_execute:
            service_checks_to_execute[service] = set()
        service_checks_to_execute[service].add(check)
    return service_checks_to_execute

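For orientation, here is a minimal sketch of driving the Scan generator above. The AwsProvider import path and the exact Scan constructor signature are assumptions inferred from the arguments validated in __init__, not a confirmed API.

# Hypothetical driver for the Scan generator above; the constructor
# arguments are assumed from the validation logic shown in __init__.
from prowler.providers.aws.aws_provider import AwsProvider  # assumed import path

provider = AwsProvider()  # assumes default AWS credentials are available
scan = Scan(provider, checks=["ec2_instance_public_ip"])

for progress, findings in scan.scan():
    # Each iteration yields the overall progress and one check's findings
    print(f"{progress:.1f}% done, {len(findings)} findings from this check")

print(f"Scan took {scan.duration}s and produced {len(scan.findings)} findings")
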
@@ -1,5 +1,6 @@
 import json
 import os
+from operator import attrgetter

 try:
     import grp
@@ -16,11 +17,11 @@ from io import TextIOWrapper
 from ipaddress import ip_address
 from os.path import exists
 from time import mktime
-from typing import Optional
+from typing import Any, Optional

 from colorama import Style
 from detect_secrets import SecretsCollection
-from detect_secrets.settings import default_settings
+from detect_secrets.settings import transient_settings

 from prowler.config.config import encoding_format_utf_8
 from prowler.lib.logger import logger
@@ -80,20 +81,96 @@ def hash_sha512(string: str) -> str:
     return sha512(string.encode(encoding_format_utf_8)).hexdigest()[0:9]


-def detect_secrets_scan(data):
-    temp_data_file = tempfile.NamedTemporaryFile(delete=False)
-    temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
-    temp_data_file.close()
+def detect_secrets_scan(
+    data: str = None, file=None, excluded_secrets: list[str] = None
+) -> list[dict[str, str]]:
+    """detect_secrets_scan scans the data or file for secrets using the detect-secrets library.
+    Args:
+        data (str): The data to scan for secrets.
+        file (str): The file to scan for secrets.
+        excluded_secrets (list): A list of regex patterns to exclude from the scan.
+    Returns:
+        dict: The secrets found in the data or file.
+    Raises:
+        Exception: If an error occurs during the scan.
+    Examples:
+        >>> detect_secrets_scan(data="password=password")
+        [{'filename': 'data', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]
+        >>> detect_secrets_scan(file="file.txt")
+        {'file.txt': [{'filename': 'file.txt', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]}
+    """
+    try:
+        if not file:
+            temp_data_file = tempfile.NamedTemporaryFile(delete=False)
+            temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
+            temp_data_file.close()

-    secrets = SecretsCollection()
-    with default_settings():
-        secrets.scan_file(temp_data_file.name)
-    os.remove(temp_data_file.name)
+        secrets = SecretsCollection()

-    detect_secrets_output = secrets.json()
-    if detect_secrets_output:
-        return detect_secrets_output[temp_data_file.name]
-    else:
+        settings = {
+            "plugins_used": [
+                {"name": "ArtifactoryDetector"},
+                {"name": "AWSKeyDetector"},
+                {"name": "AzureStorageKeyDetector"},
+                {"name": "BasicAuthDetector"},
+                {"name": "CloudantDetector"},
+                {"name": "DiscordBotTokenDetector"},
+                {"name": "GitHubTokenDetector"},
+                {"name": "GitLabTokenDetector"},
+                {"name": "Base64HighEntropyString", "limit": 6.0},
+                {"name": "HexHighEntropyString", "limit": 3.0},
+                {"name": "IbmCloudIamDetector"},
+                {"name": "IbmCosHmacDetector"},
+                # {"name": "IPPublicDetector"}, https://github.com/Yelp/detect-secrets/pull/885
+                {"name": "JwtTokenDetector"},
+                {"name": "KeywordDetector"},
+                {"name": "MailchimpDetector"},
+                {"name": "NpmDetector"},
+                {"name": "OpenAIDetector"},
+                {"name": "PrivateKeyDetector"},
+                {"name": "PypiTokenDetector"},
+                {"name": "SendGridDetector"},
+                {"name": "SlackDetector"},
+                {"name": "SoftlayerDetector"},
+                {"name": "SquareOAuthDetector"},
+                {"name": "StripeDetector"},
+                # {"name": "TelegramBotTokenDetector"}, https://github.com/Yelp/detect-secrets/pull/878
+                {"name": "TwilioKeyDetector"},
+            ],
+            "filters_used": [
+                {"path": "detect_secrets.filters.common.is_invalid_file"},
+                {"path": "detect_secrets.filters.common.is_known_false_positive"},
+                {"path": "detect_secrets.filters.heuristic.is_likely_id_string"},
+                {"path": "detect_secrets.filters.heuristic.is_potential_secret"},
+            ],
+        }
+        if excluded_secrets and len(excluded_secrets) > 0:
+            settings["filters_used"].append(
+                {
+                    "path": "detect_secrets.filters.regex.should_exclude_line",
+                    "pattern": excluded_secrets,
+                }
+            )
+        with transient_settings(settings):
+            if file:
+                secrets.scan_file(file)
+            else:
+                secrets.scan_file(temp_data_file.name)

+        if not file:
+            os.remove(temp_data_file.name)

+        detect_secrets_output = secrets.json()

+        if detect_secrets_output:
+            if file:
+                return detect_secrets_output[file]
+            else:
+                return detect_secrets_output[temp_data_file.name]
+        else:
+            return None
+    except Exception as e:
+        logger.error(f"Error scanning for secrets: {e}")
         return None

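As a usage sketch of the new signature above, a caller can pass raw data plus an exclusion pattern; the sample strings and regex below are illustrative only.

# Illustrative call to the new detect_secrets_scan API; the data and the
# exclusion regex are made-up examples.
findings = detect_secrets_scan(
    data="password=supersecret\ntoken=AKIAIOSFODNN7EXAMPLE",
    excluded_secrets=[r"password=supersecret"],  # matching lines are filtered out
)
if findings:
    for secret in findings:
        print(secret["type"], "at line", secret["line_number"])
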
@@ -196,3 +273,44 @@ def print_boxes(messages: list, report_title: str):
             f"{Style.BRIGHT}{Style.RESET_ALL} · {message}{Style.BRIGHT}{Style.RESET_ALL}"
         )
     print()


+def dict_to_lowercase(d):
+    """
+    Convert all keys in a dictionary to lowercase.
+    This function takes a dictionary and returns a new dictionary
+    with all the keys converted to lowercase. If a value in the
+    dictionary is another dictionary, the function will recursively
+    convert the keys of that dictionary to lowercase as well.
+    Args:
+        d (dict): The dictionary to convert.
+    Returns:
+        dict: A new dictionary with all keys in lowercase.
+    """
+
+    new_dict = {}
+    for k, v in d.items():
+        if isinstance(v, dict):
+            v = dict_to_lowercase(v)
+        new_dict[k.lower()] = v
+    return new_dict


+def get_nested_attribute(obj: Any, attr: str) -> Any:
+    """
+    Get a nested attribute from an object.
+    Args:
+        obj (Any): The object to get the attribute from.
+        attr (str): The attribute to get.
+    Returns:
+        Any: The attribute value if present, otherwise "".
+    """
+    try:
+        return attrgetter(attr)(obj)
+    except AttributeError:
+        return ""
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
+        return ""

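A quick illustration of the two helpers above; the sample object and keys are hypothetical.

from types import SimpleNamespace

# Hypothetical nested object for demonstration purposes only.
resource = SimpleNamespace(tags=SimpleNamespace(Environment="prod"))

print(get_nested_attribute(resource, "tags.Environment"))  # -> "prod"
print(get_nested_attribute(resource, "tags.Owner"))        # -> "" (missing attribute)

print(dict_to_lowercase({"Name": "web", "Tags": {"Env": "prod"}}))
# -> {'name': 'web', 'tags': {'env': 'prod'}}
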
File diff suppressed because it is too large
File diff suppressed because it is too large
222 prowler/providers/aws/exceptions/exceptions.py Normal file
@@ -0,0 +1,222 @@
from prowler.exceptions.exceptions import ProwlerException


# Exception codes from 1000 to 1999 are reserved for AWS exceptions
class AWSBaseException(ProwlerException):
    """Base class for AWS errors."""

    AWS_ERROR_CODES = {
        (1000, "AWSClientError"): {
            "message": "AWS ClientError occurred",
            "remediation": "Check your AWS client configuration and permissions.",
        },
        (1001, "AWSProfileNotFoundError"): {
            "message": "AWS Profile not found",
            "remediation": "Ensure the AWS profile is correctly configured, please visit https://docs.aws.amazon.com/cli/v1/userguide/cli-configure-files.html",
        },
        (1002, "AWSNoCredentialsError"): {
            "message": "No AWS credentials found",
            "remediation": "Verify that AWS credentials are properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/authentication/ and https://docs.aws.amazon.com/cli/v1/userguide/cli-chap-configure.html",
        },
        (1003, "AWSArgumentTypeValidationError"): {
            "message": "AWS argument type validation error",
            "remediation": "Check the provided argument types specific to AWS and ensure they meet the required format. For session duration check: https://docs.aws.amazon.com/singlesignon/latest/userguide/howtosessionduration.html and for role session name check: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
        },
        (1004, "AWSSetUpSessionError"): {
            "message": "AWS session setup error",
            "remediation": "Check the AWS session setup and ensure it is properly configured, please visit https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html and check if the provided profile has the necessary permissions.",
        },
        (1005, "AWSIAMRoleARNRegionNotEmtpyError"): {
            "message": "AWS IAM Role ARN region is not empty",
            "remediation": "Check the AWS IAM Role ARN region and ensure it is empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1006, "AWSIAMRoleARNPartitionEmptyError"): {
            "message": "AWS IAM Role ARN partition is empty",
            "remediation": "Check the AWS IAM Role ARN partition and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1007, "AWSIAMRoleARNMissingFieldsError"): {
            "message": "AWS IAM Role ARN missing fields",
            "remediation": "Check the AWS IAM Role ARN and ensure all required fields are present, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1008, "AWSIAMRoleARNServiceNotIAMnorSTSError"): {
            "message": "AWS IAM Role ARN service is not IAM nor STS",
            "remediation": "Check the AWS IAM Role ARN service and ensure it is either IAM or STS, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1009, "AWSIAMRoleARNInvalidAccountIDError"): {
            "message": "AWS IAM Role ARN account ID is invalid",
            "remediation": "Check the AWS IAM Role ARN account ID and ensure it is a valid 12-digit number, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1010, "AWSIAMRoleARNInvalidResourceTypeError"): {
            "message": "AWS IAM Role ARN resource type is invalid",
            "remediation": "Check the AWS IAM Role ARN resource type and ensure it is valid, resource types are: role, user, assumed-role, root, federated-user, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1011, "AWSIAMRoleARNEmptyResourceError"): {
            "message": "AWS IAM Role ARN resource is empty",
            "remediation": "Check the AWS IAM Role ARN resource and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1012, "AWSAssumeRoleError"): {
            "message": "AWS assume role error",
            "remediation": "Check the AWS assume role configuration and ensure it is properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/role-assumption/ and https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
        },
        (1013, "AWSAccessKeyIDInvalidError"): {
            "message": "AWS Access Key ID or Session Token is invalid",
            "remediation": "Check your AWS Access Key ID or Session Token and ensure it is valid.",
        },
        (1014, "AWSSecretAccessKeyInvalidError"): {
            "message": "AWS Secret Access Key is invalid",
            "remediation": "Check your AWS Secret Access Key and signing method and ensure it is valid.",
        },
        (1015, "AWSInvalidProviderIdError"): {
            "message": "The provided AWS credentials belong to a different account",
            "remediation": "Check the provided AWS credentials and review if they belong to the account you want to use.",
        },
        (1016, "AWSSessionTokenExpiredError"): {
            "message": "The provided AWS Session Token is expired",
            "remediation": "Get a new AWS Session Token and configure it for the provider.",
        },
    }

    def __init__(self, code, file=None, original_exception=None, message=None):
        error_info = self.AWS_ERROR_CODES.get((code, self.__class__.__name__))
        if message:
            error_info["message"] = message
        super().__init__(
            code,
            source="AWS",
            file=file,
            original_exception=original_exception,
            error_info=error_info,
        )


class AWSCredentialsError(AWSBaseException):
    """Base class for AWS credentials errors."""

    def __init__(self, code, file=None, original_exception=None, message=None):
        super().__init__(code, file, original_exception, message)


class AWSClientError(AWSCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1000, file=file, original_exception=original_exception, message=message
        )


class AWSProfileNotFoundError(AWSCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1001, file=file, original_exception=original_exception, message=message
        )


class AWSNoCredentialsError(AWSCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1002, file=file, original_exception=original_exception, message=message
        )


class AWSArgumentTypeValidationError(AWSBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1003, file=file, original_exception=original_exception, message=message
        )


class AWSSetUpSessionError(AWSBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1004, file=file, original_exception=original_exception, message=message
        )


class AWSRoleArnError(AWSBaseException):
    """Base class for AWS role ARN errors."""

    def __init__(self, code, file=None, original_exception=None, message=None):
        super().__init__(code, file, original_exception, message)


class AWSIAMRoleARNRegionNotEmtpyError(AWSRoleArnError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1005, file=file, original_exception=original_exception, message=message
        )


class AWSIAMRoleARNPartitionEmptyError(AWSRoleArnError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1006, file=file, original_exception=original_exception, message=message
        )


class AWSIAMRoleARNMissingFieldsError(AWSRoleArnError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1007, file=file, original_exception=original_exception, message=message
        )


class AWSIAMRoleARNServiceNotIAMnorSTSError(AWSRoleArnError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1008, file=file, original_exception=original_exception, message=message
        )


class AWSIAMRoleARNInvalidAccountIDError(AWSRoleArnError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1009, file=file, original_exception=original_exception, message=message
        )


class AWSIAMRoleARNInvalidResourceTypeError(AWSRoleArnError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1010, file=file, original_exception=original_exception, message=message
        )


class AWSIAMRoleARNEmptyResourceError(AWSRoleArnError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1011, file=file, original_exception=original_exception, message=message
        )


class AWSAssumeRoleError(AWSBaseException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1012, file=file, original_exception=original_exception, message=message
        )


class AWSAccessKeyIDInvalidError(AWSCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1013, file=file, original_exception=original_exception, message=message
        )


class AWSSecretAccessKeyInvalidError(AWSCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1014, file=file, original_exception=original_exception, message=message
        )


class AWSInvalidProviderIdError(AWSCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1015, file=file, original_exception=original_exception, message=message
        )


class AWSSessionTokenExpiredError(AWSCredentialsError):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1016, file=file, original_exception=original_exception, message=message
        )

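A short sketch of how this hierarchy is meant to be raised and caught. How the message and remediation are rendered depends on ProwlerException (defined elsewhere), so the printed output here is an assumption.

# Hypothetical usage of the AWS exception hierarchy above.
try:
    raise AWSProfileNotFoundError(file="aws_provider.py")
except AWSCredentialsError as error:
    # Code 1001 resolves its message/remediation from AWS_ERROR_CODES
    print(f"[{error.__class__.__name__}] {error}")
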
@@ -168,13 +168,40 @@ def init_parser(self):
         )


-def validate_session_duration(duration):
-    """validate_session_duration validates that the AWS STS Assume Role Session Duration is between 900 and 43200 seconds."""
-    duration = int(duration)
+def validate_session_duration(session_duration: int) -> int:
+    """validate_session_duration validates that the input session_duration is valid"""
+    duration = int(session_duration)
     # Since range(i, j) goes from i to j-1, we have to use j+1
     if duration not in range(900, 43201):
-        raise ArgumentTypeError("Session duration must be between 900 and 43200")
-    return duration
+        raise ArgumentTypeError(
+            "Session duration must be between 900 and 43200 seconds"
+        )
+    else:
+        return duration

+def validate_role_session_name(session_name) -> str:
+    """
+    Validates that the role session name is valid.
+
+    Args:
+        session_name (str): The role session name to be validated.
+
+    Returns:
+        str: The validated role session name.
+
+    Raises:
+        ArgumentTypeError: If the role session name is invalid.
+
+    Documentation:
+        - AWS STS AssumeRole API: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
+    """
+    if fullmatch(r"[\w+=,.@-]{2,64}", session_name):
+        return session_name
+    else:
+        raise ArgumentTypeError(
+            "Role session name must be between 2 and 64 characters long and may contain alphanumeric characters, hyphens, underscores, plus signs, equal signs, commas, periods, at signs, and tildes."
+        )


 def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
@@ -195,7 +222,7 @@ def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
     return (True, "")


-def validate_bucket(bucket_name):
+def validate_bucket(bucket_name: str) -> str:
     """validate_bucket validates that the input bucket_name is valid"""
     if search("(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
         return bucket_name
@@ -203,16 +230,3 @@ def validate_bucket(bucket_name):
     raise ArgumentTypeError(
         "Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
     )
-
-
-def validate_role_session_name(session_name):
-    """
-    validates that the role session name is valid
-    Documentation: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
-    """
-    if fullmatch(r"[\w+=,.@-]{2,64}", session_name):
-        return session_name
-    else:
-        raise ArgumentTypeError(
-            "Role Session Name must be 2-64 characters long and consist only of upper- and lower-case alphanumeric characters with no spaces. You can also include underscores or any of the following characters: =,.@-"
-        )

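These validators plug naturally into argparse as type= callables; a minimal sketch follows, where the flag names are illustrative and not necessarily the ones Prowler registers.

from argparse import ArgumentParser

# Illustrative wiring of the validators above; the flag names are made up.
parser = ArgumentParser()
parser.add_argument("--session-duration", type=validate_session_duration, default=3600)
parser.add_argument("--role-session-name", type=validate_role_session_name)

args = parser.parse_args(["--session-duration", "900"])  # accepted: lower bound
# "--session-duration 899" would make argparse exit with the ArgumentTypeError message
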
Some files were not shown because too many files have changed in this diff