Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-01-25 02:08:11 +00:00)

Compare commits — 1,056 commits
| Author | SHA1 | Date |
|---|---|---|
| — | 1,056 commit SHAs, from 0ba2dee434 through d7d9118b9b | — |
.env (62 changed lines)

@@ -3,14 +3,18 @@
# For production, it is recommended to use a secure method to store these variables and change the default secret keys.

#### Prowler UI Configuration ####
PROWLER_UI_VERSION="latest"
SITE_URL=http://localhost:3000
PROWLER_UI_VERSION="stable"
AUTH_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true
UI_PORT=3000
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
# Google Tag Manager ID
NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""

#### Prowler API Configuration ####
PROWLER_API_VERSION="stable"
@@ -24,12 +28,40 @@ POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db

# Celery-Prowler task settings
TASK_RETRY_DELAY_SECONDS=0.1
TASK_RETRY_ATTEMPTS=5

# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0

# API scan settings

# The path to the directory where scan output should be stored
DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"

# The maximum number of findings to process in a single batch
DJANGO_FINDINGS_BATCH_SIZE=1000

# The AWS access key to be used when uploading scan output to an S3 bucket
# If left empty, default AWS credentials resolution behavior will be used
DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID=""

# The AWS secret key to be used when uploading scan output to an S3 bucket
DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY=""

# An optional AWS session token
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN=""

# The AWS region where your S3 bucket is located (e.g., "us-east-1")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION=""

# The name of the S3 bucket where scan output should be stored
DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET=""

# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS=0.0.0.0
@@ -92,3 +124,29 @@ jQIDAQAB
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=

# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.7.5

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""

SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""

# Single Sign-On (SSO)
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"

# Lighthouse tracing
LANGSMITH_TRACING=false
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=""
LANGCHAIN_PROJECT=""
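The .env diff above ships sample values for AUTH_SECRET and DJANGO_SECRETS_ENCRYPTION_KEY and marks both with the comment `# openssl rand -base64 32`. A minimal sketch of regenerating them before a deployment, assuming a POSIX shell with OpenSSL and GNU sed available; the sed invocations are illustrative and not part of the diff:

```bash
# Generate fresh secrets; the variable names come from the .env shown above.
AUTH_SECRET="$(openssl rand -base64 32)"
DJANGO_SECRETS_ENCRYPTION_KEY="$(openssl rand -base64 32)"

# Replace the sample values in-place (illustrative; adjust the path to your .env copy).
sed -i "s|^AUTH_SECRET=.*|AUTH_SECRET=\"${AUTH_SECRET}\"|" .env
sed -i "s|^DJANGO_SECRETS_ENCRYPTION_KEY=.*|DJANGO_SECRETS_ENCRYPTION_KEY=\"${DJANGO_SECRETS_ENCRYPTION_KEY}\"|" .env
```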
.github/dependabot.yml (vendored, 156 changed lines)

@@ -9,108 +9,112 @@ updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
interval: "monthly"
open-pull-requests-limit: 25
target-branch: master
labels:
- "dependencies"
- "pip"

- package-ecosystem: "pip"
directory: "/api"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "pip"
- "component/api"
# Dependabot Updates are temporary disabled - 2025/03/19
# - package-ecosystem: "pip"
# directory: "/api"
# schedule:
# interval: "daily"
# open-pull-requests-limit: 10
# target-branch: master
# labels:
# - "dependencies"
# - "pip"
# - "component/api"

- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
interval: "monthly"
open-pull-requests-limit: 25
target-branch: master
labels:
- "dependencies"
- "github_actions"

- package-ecosystem: "npm"
directory: "/ui"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "npm"
- "component/ui"
# Dependabot Updates are temporary disabled - 2025/03/19
# - package-ecosystem: "npm"
# directory: "/ui"
# schedule:
# interval: "daily"
# open-pull-requests-limit: 10
# target-branch: master
# labels:
# - "dependencies"
# - "npm"
# - "component/ui"

- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
interval: "monthly"
open-pull-requests-limit: 25
target-branch: master
labels:
- "dependencies"
- "docker"

# Dependabot Updates are temporary disabled - 2025/04/15
# v4.6
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "pip"
- "v4"
# - package-ecosystem: "pip"
# directory: "/"
# schedule:
# interval: "weekly"
# open-pull-requests-limit: 10
# target-branch: v4.6
# labels:
# - "dependencies"
# - "pip"
# - "v4"

- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "github_actions"
- "v4"
# - package-ecosystem: "github-actions"
# directory: "/"
# schedule:
# interval: "weekly"
# open-pull-requests-limit: 10
# target-branch: v4.6
# labels:
# - "dependencies"
# - "github_actions"
# - "v4"

- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "docker"
- "v4"
# - package-ecosystem: "docker"
# directory: "/"
# schedule:
# interval: "weekly"
# open-pull-requests-limit: 10
# target-branch: v4.6
# labels:
# - "dependencies"
# - "docker"
# - "v4"

# Dependabot Updates are temporary disabled - 2025/03/19
# v3
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "pip"
- "v3"
# - package-ecosystem: "pip"
# directory: "/"
# schedule:
# interval: "monthly"
# open-pull-requests-limit: 10
# target-branch: v3
# labels:
# - "dependencies"
# - "pip"
# - "v3"

- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "github_actions"
- "v3"
# - package-ecosystem: "github-actions"
# directory: "/"
# schedule:
# interval: "monthly"
# open-pull-requests-limit: 10
# target-branch: v3
# labels:
# - "dependencies"
# - "github_actions"
# - "v3"
.github/labeler.yml (vendored, 15 changed lines)

@@ -27,6 +27,11 @@ provider/github:
- any-glob-to-any-file: "prowler/providers/github/**"
- any-glob-to-any-file: "tests/providers/github/**"

provider/iac:
- changed-files:
- any-glob-to-any-file: "prowler/providers/iac/**"
- any-glob-to-any-file: "tests/providers/iac/**"

github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"
@@ -92,3 +97,13 @@ component/api:
component/ui:
- changed-files:
- any-glob-to-any-file: "ui/**"

compliance:
- changed-files:
- any-glob-to-any-file: "prowler/compliance/**"
- any-glob-to-any-file: "prowler/lib/outputs/compliance/**"
- any-glob-to-any-file: "tests/lib/outputs/compliance/**"

review-django-migrations:
- changed-files:
- any-glob-to-any-file: "api/src/backend/api/migrations/**"
.github/pull_request_template.md (vendored, 9 changed lines)

@@ -15,7 +15,14 @@ Please include a summary of the change and which issue is fixed. List any depend
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
- [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/prowler/CHANGELOG.md), if applicable.

#### API
- [ ] Verify if API specs need to be regenerated.
- [ ] Check if version updates are required (e.g., specs, Poetry, etc.).
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/api/CHANGELOG.md), if applicable.

### License

By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
(file header not captured)

@@ -6,6 +6,7 @@ on:
- "master"
paths:
- "api/**"
- "prowler/**"
- ".github/workflows/api-build-lint-push-containers.yml"

# Uncomment the code below to test this action on PRs
@@ -61,33 +62,40 @@ jobs:

steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Set short git commit SHA
id: vars
run: |
shortSha=$(git rev-parse --short ${{ github.sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

- name: Login to DockerHub
uses: docker/login-action@v3
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@v6
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
@@ -96,3 +104,12 @@ jobs:
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Trigger deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}
event-type: prowler-api-deploy
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'
.github/workflows/api-codeql.yml (vendored, 6 changed lines)

@@ -44,16 +44,16 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "/language:${{matrix.language}}"
.github/workflows/api-pull-request.yml (vendored, 75 changed lines)

@@ -6,6 +6,7 @@ on:
- "master"
- "v5.*"
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
pull_request:
branches:
@@ -14,7 +15,6 @@ on:
paths:
- "api/**"

env:
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
@@ -26,7 +26,12 @@ env:
VALKEY_HOST: localhost
VALKEY_PORT: 6379
VALKEY_DB: 0

API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api
IGNORE_FILES: |
api/docs/**
api/README.md
api/CHANGELOG.md

jobs:
test:
@@ -70,39 +75,54 @@ jobs:
--health-retries 5

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: api/**
files_ignore: |
api/.github/**
api/docs/**
api/permissions/**
api/README.md
api/mkdocs.yml
files_ignore: ${{ env.IGNORE_FILES }}

- name: Replace @master with current branch in pyproject.toml
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
echo "Using branch: $BRANCH_NAME"
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml

- name: Install poetry
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==1.8.5
pipx install poetry==2.1.1

- name: Update poetry.lock after the branch name change
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock

- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v5
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"

- name: Install system dependencies for xmlsec
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
sudo apt-get update
sudo apt-get install -y libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config

- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry install
poetry install --no-root
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
@@ -143,8 +163,9 @@ jobs:
- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
run: |
poetry run safety check --ignore 70612,66963
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323

- name: Vulture
working-directory: ./api
@@ -166,8 +187,32 @@ jobs:

- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v5
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: api/**
files_ignore: ${{ env.IGNORE_FILES }}
- name: Set up Docker Buildx
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build Container
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.API_WORKING_DIR }}
push: false
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
cache-from: type=gha
cache-to: type=gha,mode=max
.github/workflows/backport.yml (vendored, 4 changed lines)

@@ -23,7 +23,7 @@ jobs:
steps:
- name: Check labels
id: preview_label_check
uses: docker://agilepathway/pull-request-label-checker:v1.6.55
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
with:
allow_failure: true
prefix_mode: true
@@ -33,7 +33,7 @@ jobs:

- name: Backport Action
if: steps.preview_label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@v9.5.1
uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
(file header not captured)

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the Prowler Documentation URI
uses: peter-evans/create-or-update-comment@v4
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ env.PR_NUMBER }}
body: |
.github/workflows/conventional-commit.yml (vendored, new file, 23 lines)

@@ -0,0 +1,23 @@
name: Prowler - Conventional Commit

on:
pull_request:
types:
- "opened"
- "edited"
- "synchronize"
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"

jobs:
conventional-commit-check:
runs-on: ubuntu-latest
steps:
- name: conventional-commit-check
id: conventional-commit-check
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
with:
pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
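The pr-title-regex in the workflow above requires a type, an optional parenthesised scope, then ": " and a description. A minimal sketch of what it accepts and rejects, assuming GNU grep with PCRE support (`-P`); the sample titles are illustrative, not taken from the repository:

```bash
# Same pattern as pr-title-regex in conventional-commit.yml above.
regex='^([^\s(]+)(?:\(([^)]+)\))?: (.+)'

echo 'feat(api): add RDS checks'  | grep -qP "$regex" && echo "match"     # type with scope
echo 'fix: handle empty findings' | grep -qP "$regex" && echo "match"     # type without scope
echo 'update readme'              | grep -qP "$regex" || echo "no match"  # missing "type: " prefix
```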
.github/workflows/create-backport-label.yml (vendored, new file, 67 lines)

@@ -0,0 +1,67 @@
name: Create Backport Label

on:
release:
types: [published]

jobs:
create_label:
runs-on: ubuntu-latest
permissions:
contents: write
issues: write
steps:
- name: Create backport label
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RELEASE_TAG: ${{ github.event.release.tag_name }}
OWNER_REPO: ${{ github.repository }}
run: |
VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)

# Check if it's a minor version (X.Y.0)
if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."

TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)

FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"

echo "Effective label name will be: ${FINAL_LABEL_NAME}"
echo "Effective description will be: ${FINAL_DESCRIPTION}"

# Check if the label already exists
STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")

if [ "${STATUS_CODE}" -eq 200 ]; then
echo "Label '${FINAL_LABEL_NAME}' already exists."
elif [ "${STATUS_CODE}" -eq 404 ]; then
echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
# Prepare JSON data payload
JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")

CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
-H "Accept: application/vnd.github.v3+json" \
-H "Authorization: token ${GITHUB_TOKEN}" \
--data "${JSON_DATA}" \
"https://api.github.com/repos/${OWNER_REPO}/labels")

CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
rm -f /tmp/curl_create_response.json

if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
echo "Label '${FINAL_LABEL_NAME}' created successfully."
else
echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
echo "Response: $CREATE_RESPONSE_BODY"
exit 1
fi
else
echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
exit 1
fi
else
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
exit 0
fi
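The label name in the workflow above is derived only for X.Y.0 releases. A minimal local sketch of that derivation, with no GitHub API calls; the sample tags are illustrative:

```bash
# Same version handling as create-backport-label.yml above, run against sample tags.
for RELEASE_TAG in v5.6.0 5.7.3 v6.0.0; do
  VERSION_ONLY=${RELEASE_TAG#v}                         # strip the optional 'v' prefix
  if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
    TWO_DIGIT_VERSION=${VERSION_ONLY%.0}
    echo "$RELEASE_TAG -> backport-to-v${TWO_DIGIT_VERSION}"
  else
    echo "$RELEASE_TAG -> not a minor release, no label"
  fi
done
# Expected output: v5.6.0 -> backport-to-v5.6, 5.7.3 -> no label, v6.0.0 -> backport-to-v6.0
```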
.github/workflows/find-secrets.yml (vendored, 4 changed lines)

@@ -7,11 +7,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.88.2
uses: trufflesecurity/trufflehog@6641d4ba5b684fffe195b9820345de1bf19f3181 # v3.89.2
with:
path: ./
base: ${{ github.event.repository.default_branch }}
.github/workflows/labeler.yml (vendored, 2 changed lines)

@@ -14,4 +14,4 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v5
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
.github/workflows/prowler-release-preparation.yml (vendored, new file, 215 lines)

@@ -0,0 +1,215 @@
name: Prowler Release Preparation

run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}

on:
workflow_dispatch:
inputs:
prowler_version:
description: 'Prowler version to release (e.g., 5.9.0)'
required: true
type: string

env:
PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}

jobs:
prepare-release:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0

- name: Parse version and determine branch
run: |
# Validate version format (reusing pattern from sdk-bump-version.yml)
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
MAJOR_VERSION=${BASH_REMATCH[1]}
MINOR_VERSION=${BASH_REMATCH[2]}
PATCH_VERSION=${BASH_REMATCH[3]}

# Export version components to environment
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
echo "PATCH_VERSION=${PATCH_VERSION}" >> "${GITHUB_ENV}"

# Determine branch name (format: v5.9)
BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"

# Calculate UI version (1.X.X format - matches Prowler minor version)
UI_VERSION="1.${MINOR_VERSION}.${PATCH_VERSION}"
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"

# Calculate API version (1.X.X format - one minor version ahead)
API_MINOR_VERSION=$((MINOR_VERSION + 1))
API_VERSION="1.${API_MINOR_VERSION}.${PATCH_VERSION}"
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"

echo "Prowler version: $PROWLER_VERSION"
echo "Branch name: $BRANCH_NAME"
echo "UI version: $UI_VERSION"
echo "API version: $API_VERSION"
echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
else
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
exit 1
fi

- name: Checkout existing branch for patch release
if: ${{ env.PATCH_VERSION != '0' }}
run: |
echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME"; then
echo "Branch $BRANCH_NAME exists locally, checking out..."
git checkout "$BRANCH_NAME"
elif git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
echo "Branch $BRANCH_NAME exists remotely, checking out..."
git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
else
echo "ERROR: Branch $BRANCH_NAME should exist for patch release $PROWLER_VERSION"
exit 1
fi

- name: Verify version in pyproject.toml
run: |
CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Version mismatch in pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
exit 1
fi
echo "✓ pyproject.toml version: $CURRENT_VERSION"

- name: Verify version in prowler/config/config.py
run: |
CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Version mismatch in prowler/config/config.py (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
exit 1
fi
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"

- name: Verify version in api/pyproject.toml
run: |
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in api/pyproject.toml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"

- name: Verify prowler dependency in api/pyproject.toml
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_PROWLER_REF" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_PROWLER_REF')"
exit 1
fi
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"

- name: Verify version in api/src/backend/api/v1/views.py
run: |
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in views.py (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"

- name: Create release branch for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
echo "Minor release detected (patch = 0), creating new branch $BRANCH_NAME..."
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME" || git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
echo "ERROR: Branch $BRANCH_NAME already exists for minor release $PROWLER_VERSION"
exit 1
fi
git checkout -b "$BRANCH_NAME"

- name: Extract changelog entries
run: |
set -e

# Function to extract changelog for a specific version
extract_changelog() {
local file="$1"
local version="$2"
local output_file="$3"

if [ ! -f "$file" ]; then
echo "Warning: $file not found, skipping..."
touch "$output_file"
return
fi

# Extract changelog section for this version
awk -v version="$version" '
/^## \[v?'"$version"'\]/ { found=1; next }
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
found && !/^## \[v?'"$version"'\]/ { print }
' "$file" > "$output_file"

# Remove --- separators
sed -i '/^---$/d' "$output_file"

# Remove trailing empty lines
sed -i '/^$/d' "$output_file"
}

# Extract changelogs
echo "Extracting changelog entries..."
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"

# Combine changelogs in order: UI, API, SDK
> combined_changelog.md

if [ -s "ui_changelog.md" ]; then
echo "## UI" >> combined_changelog.md
echo "" >> combined_changelog.md
cat ui_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi

if [ -s "api_changelog.md" ]; then
echo "## API" >> combined_changelog.md
echo "" >> combined_changelog.md
cat api_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi

if [ -s "prowler_changelog.md" ]; then
echo "## SDK" >> combined_changelog.md
echo "" >> combined_changelog.md
cat prowler_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi

echo "Combined changelog preview:"
cat combined_changelog.md

- name: Create draft release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
with:
tag_name: ${{ env.PROWLER_VERSION }}
name: Prowler ${{ env.PROWLER_VERSION }}
body_path: combined_changelog.md
draft: true
target_commitish: ${{ env.PATCH_VERSION == '0' && 'master' || env.BRANCH_NAME }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Clean up temporary files
run: |
rm -f prowler_changelog.md api_changelog.md ui_changelog.md combined_changelog.md
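The "Parse version and determine branch" step above derives the release branch, UI version, and API version from the Prowler version. A minimal local sketch of that arithmetic, using a hypothetical input of 5.9.1; the rules match the workflow shown above:

```bash
# Version derivation as in prowler-release-preparation.yml, for a sample input.
PROWLER_VERSION=5.9.1
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
  MAJOR=${BASH_REMATCH[1]}; MINOR=${BASH_REMATCH[2]}; PATCH=${BASH_REMATCH[3]}
  echo "BRANCH_NAME=v${MAJOR}.${MINOR}"         # v5.9  (patch releases reuse the existing branch)
  echo "UI_VERSION=1.${MINOR}.${PATCH}"         # 1.9.1 (tracks the Prowler minor version)
  echo "API_VERSION=1.$((MINOR + 1)).${PATCH}"  # 1.10.1 (one minor version ahead)
fi
```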
86
.github/workflows/pull-request-check-changelog.yml
vendored
Normal file
86
.github/workflows/pull-request-check-changelog.yml
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
name: Check Changelog
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, labeled, unlabeled]
|
||||
|
||||
jobs:
|
||||
check-changelog:
|
||||
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
pull-requests: write
|
||||
env:
|
||||
MONITORED_FOLDERS: "api ui prowler"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get list of changed files
|
||||
id: changed_files
|
||||
run: |
|
||||
git fetch origin ${{ github.base_ref }}
|
||||
git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
|
||||
cat changed_files.txt
|
||||
|
||||
- name: Check for folder changes and changelog presence
|
||||
id: check_folders
|
||||
run: |
|
||||
missing_changelogs=""
|
||||
|
||||
for folder in $MONITORED_FOLDERS; do
|
||||
if grep -q "^${folder}/" changed_files.txt; then
|
||||
echo "Detected changes in ${folder}/"
|
||||
if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
|
||||
echo "No changelog update found for ${folder}/"
|
||||
missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
|
||||
echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find existing changelog comment
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||
id: find_comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '<!-- changelog-check -->'
|
||||
|
||||
- name: Comment on PR if changelog is missing
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs != ''
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
||||
body: |
|
||||
<!-- changelog-check -->
|
||||
⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
|
||||
|
||||
${{ steps.check_folders.outputs.missing_changelogs }}
|
||||
|
||||
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.
|
||||
|
||||
- name: Comment on PR if all changelogs are present
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs == ''
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
||||
body: |
|
||||
<!-- changelog-check -->
|
||||
✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉
|
||||
|
||||
- name: Fail if changelog is missing
|
||||
if: steps.check_folders.outputs.missing_changelogs != ''
|
||||
run: |
|
||||
echo "ERROR: Missing changelog updates in some folders."
|
||||
exit 1
|
||||
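The changelog check above can be reproduced locally before opening a pull request. A minimal sketch follows; the base branch name and monitored folders mirror the workflow but are assumptions for local use.

```bash
#!/usr/bin/env bash
# Local approximation of the changelog check: for each monitored folder with
# changes relative to the base branch, require a change to its CHANGELOG.md.
set -euo pipefail

base_branch="master"               # assumed base branch
monitored_folders="api ui prowler"
changed_files=$(git diff --name-only origin/${base_branch}...HEAD)

missing=""
for folder in $monitored_folders; do
  if grep -q "^${folder}/" <<<"$changed_files"; then
    if ! grep -q "^${folder}/CHANGELOG.md$" <<<"$changed_files"; then
      missing="${missing} ${folder}"
    fi
  fi
done

if [ -n "$missing" ]; then
  echo "Missing CHANGELOG.md updates in:${missing}" >&2
  exit 1
fi
echo "All required CHANGELOG.md files updated."
```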
37
.github/workflows/pull-request-merged.yml
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
name: Prowler - Merged Pull Request
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: ['master']
|
||||
types: ['closed']
|
||||
|
||||
jobs:
|
||||
trigger-cloud-pull-request:
|
||||
name: Trigger Cloud Pull Request
|
||||
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.merge_commit_sha }}
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
run: |
|
||||
shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Trigger pull request
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
event-type: prowler-pull-request-merged
|
||||
client-payload: '{
|
||||
"PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
|
||||
"PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
|
||||
"PROWLER_PR_TITLE": "${{ github.event.pull_request.title }}",
|
||||
"PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
|
||||
"PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
|
||||
"PROWLER_PR_URL":${{ toJson(github.event.pull_request.html_url) }}
|
||||
}'
|
||||
@@ -59,16 +59,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pipx install poetry==1.8.5
|
||||
pipx install poetry==2.*
|
||||
pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Get Prowler version
|
||||
@@ -108,13 +108,13 @@ jobs:
|
||||
esac
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
@@ -123,11 +123,11 @@ jobs:
|
||||
AWS_REGION: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
@@ -140,7 +140,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
# Use local context to get changes
|
||||
# https://github.com/docker/build-push-action#path-context
|
||||
|
||||
147
.github/workflows/sdk-bump-version.yml
vendored
Normal file
@@ -0,0 +1,147 @@
|
||||
name: SDK - Bump Version
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ github.event.release.tag_name }}
|
||||
BASE_BRANCH: master
|
||||
|
||||
jobs:
|
||||
bump-version:
|
||||
name: Bump Version
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Get Prowler version
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
FIX_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
# Export version components to GitHub environment
|
||||
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "FIX_VERSION=${FIX_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
if (( MAJOR_VERSION == 5 )); then
|
||||
if (( FIX_VERSION == 0 )); then
|
||||
echo "Minor Release: $PROWLER_VERSION"
|
||||
|
||||
# Set up next minor version for master
|
||||
BUMP_VERSION_TO=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).${FIX_VERSION}
|
||||
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
TARGET_BRANCH=${BASE_BRANCH}
|
||||
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Set up patch version for version branch
|
||||
PATCH_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.1
|
||||
echo "PATCH_VERSION_TO=${PATCH_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Bumping to next minor version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
|
||||
echo "Bumping to next patch version: ${PATCH_VERSION_TO} in branch ${VERSION_BRANCH}"
|
||||
else
|
||||
echo "Patch Release: $PROWLER_VERSION"
|
||||
|
||||
BUMP_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.$((FIX_VERSION + 1))
|
||||
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
TARGET_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Bumping to next patch version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
|
||||
fi
|
||||
else
|
||||
echo "Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Bump versions in files
|
||||
run: |
|
||||
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
|
||||
echo "Using BUMP_VERSION_TO=$BUMP_VERSION_TO"
|
||||
|
||||
set -e
|
||||
|
||||
echo "Bumping version in pyproject.toml ..."
|
||||
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${BUMP_VERSION_TO}\"|" pyproject.toml
|
||||
|
||||
echo "Bumping version in prowler/config/config.py ..."
|
||||
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${BUMP_VERSION_TO}\"|" prowler/config/config.py
|
||||
|
||||
echo "Bumping version in .env ..."
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${BUMP_VERSION_TO}|" .env
|
||||
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.TARGET_BRANCH }}
|
||||
commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
labels: no-changelog
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler version to v${{ env.BUMP_VERSION_TO }}
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
- name: Handle patch version for minor release
|
||||
if: env.FIX_VERSION == '0'
|
||||
run: |
|
||||
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
|
||||
echo "Using PATCH_VERSION_TO=$PATCH_VERSION_TO"
|
||||
|
||||
set -e
|
||||
|
||||
echo "Bumping version in pyproject.toml ..."
|
||||
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${PATCH_VERSION_TO}\"|" pyproject.toml
|
||||
|
||||
echo "Bumping version in prowler/config/config.py ..."
|
||||
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${PATCH_VERSION_TO}\"|" prowler/config/config.py
|
||||
|
||||
echo "Bumping version in .env ..."
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PATCH_VERSION_TO}|" .env
|
||||
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create Pull Request for patch version
|
||||
if: env.FIX_VERSION == '0'
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
labels: no-changelog
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler version to v${{ env.PATCH_VERSION_TO }}
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
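The bump rules in the workflow above reduce to a small amount of arithmetic on the released tag: an X.Y.0 release bumps master to X.(Y+1).0 and the vX.Y branch to X.Y.1, while an X.Y.Z patch release bumps the vX.Y branch to X.Y.(Z+1). A standalone sketch of that logic, with a hypothetical tag value, is shown below.

```bash
#!/usr/bin/env bash
# Sketch of the version arithmetic from the bump-version job.
set -euo pipefail

PROWLER_VERSION="5.9.0"   # hypothetical released tag

if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
  major=${BASH_REMATCH[1]}; minor=${BASH_REMATCH[2]}; fix=${BASH_REMATCH[3]}
  if (( fix == 0 )); then
    # Minor release: bump master and open the patch line on the version branch.
    echo "master              -> ${major}.$((minor + 1)).0"
    echo "v${major}.${minor} branch -> ${major}.${minor}.1"
  else
    # Patch release: bump only the version branch.
    echo "v${major}.${minor} branch -> ${major}.${minor}.$((fix + 1))"
  fi
else
  echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
  exit 1
fi
```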
8
.github/workflows/sdk-codeql.yml
vendored
@@ -21,6 +21,7 @@ on:
|
||||
paths-ignore:
|
||||
- 'ui/**'
|
||||
- 'api/**'
|
||||
- '.github/**'
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
@@ -30,6 +31,7 @@ on:
|
||||
paths-ignore:
|
||||
- 'ui/**'
|
||||
- 'api/**'
|
||||
- '.github/**'
|
||||
schedule:
|
||||
- cron: '00 12 * * *'
|
||||
|
||||
@@ -50,16 +52,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/sdk-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
156
.github/workflows/sdk-pull-request.yml
vendored
@@ -21,11 +21,11 @@ jobs:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@v45
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
@@ -34,21 +34,24 @@ jobs:
|
||||
permissions/**
|
||||
api/**
|
||||
ui/**
|
||||
prowler/CHANGELOG.md
|
||||
README.md
|
||||
mkdocs.yml
|
||||
.backportrc.json
|
||||
.env
|
||||
docker-compose*
|
||||
examples/**
|
||||
.gitignore
|
||||
|
||||
- name: Install poetry
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==1.8.5
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
@@ -56,7 +59,7 @@ jobs:
|
||||
- name: Install dependencies
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install
|
||||
poetry install --no-root
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
@@ -99,20 +102,155 @@ jobs:
|
||||
run: |
|
||||
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
|
||||
|
||||
- name: Dockerfile - Check if Dockerfile has changed
|
||||
id: dockerfile-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
Dockerfile
|
||||
|
||||
- name: Hadolint
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
|
||||
- name: Test with pytest
|
||||
# Test AWS
|
||||
- name: AWS - Check if any file has changed
|
||||
id: aws-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/aws/**
|
||||
./tests/providers/aws/**
|
||||
.poetry.lock
|
||||
|
||||
- name: AWS - Test
|
||||
if: steps.aws-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
|
||||
|
||||
# Test Azure
|
||||
- name: Azure - Check if any file has changed
|
||||
id: azure-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/azure/**
|
||||
./tests/providers/azure/**
|
||||
.poetry.lock
|
||||
|
||||
- name: Azure - Test
|
||||
if: steps.azure-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
|
||||
|
||||
# Test GCP
|
||||
- name: GCP - Check if any file has changed
|
||||
id: gcp-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/gcp/**
|
||||
./tests/providers/gcp/**
|
||||
.poetry.lock
|
||||
|
||||
- name: GCP - Test
|
||||
if: steps.gcp-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
|
||||
|
||||
# Test Kubernetes
|
||||
- name: Kubernetes - Check if any file has changed
|
||||
id: kubernetes-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/kubernetes/**
|
||||
./tests/providers/kubernetes/**
|
||||
.poetry.lock
|
||||
|
||||
- name: Kubernetes - Test
|
||||
if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
|
||||
|
||||
# Test GitHub
|
||||
- name: GitHub - Check if any file has changed
|
||||
id: github-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/github/**
|
||||
./tests/providers/github/**
|
||||
.poetry.lock
|
||||
|
||||
- name: GitHub - Test
|
||||
if: steps.github-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
|
||||
|
||||
# Test NHN
|
||||
- name: NHN - Check if any file has changed
|
||||
id: nhn-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/nhn/**
|
||||
./tests/providers/nhn/**
|
||||
.poetry.lock
|
||||
|
||||
- name: NHN - Test
|
||||
if: steps.nhn-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
|
||||
|
||||
# Test M365
|
||||
- name: M365 - Check if any file has changed
|
||||
id: m365-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/m365/**
|
||||
./tests/providers/m365/**
|
||||
.poetry.lock
|
||||
|
||||
- name: M365 - Test
|
||||
if: steps.m365-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
|
||||
|
||||
# Test IaC
|
||||
- name: IaC - Check if any file has changed
|
||||
id: iac-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/iac/**
|
||||
./tests/providers/iac/**
|
||||
.poetry.lock
|
||||
|
||||
- name: IaC - Test
|
||||
if: steps.iac-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
|
||||
|
||||
# Common Tests
|
||||
- name: Lib - Test
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
|
||||
poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
|
||||
|
||||
- name: Config - Test
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
|
||||
|
||||
# Codecov
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@v5
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler
|
||||
files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./lib_coverage.xml,./config_coverage.xml
|
||||
|
||||
10
.github/workflows/sdk-pypi-release.yml
vendored
@@ -7,7 +7,7 @@ on:
|
||||
env:
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
PYTHON_VERSION: 3.11
|
||||
CACHE: "poetry"
|
||||
# CACHE: "poetry"
|
||||
|
||||
jobs:
|
||||
repository-check:
|
||||
@@ -64,17 +64,17 @@ jobs:
|
||||
;;
|
||||
esac
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pipx install poetry==1.8.5
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
cache: ${{ env.CACHE }}
|
||||
# cache: ${{ env.CACHE }}
|
||||
|
||||
- name: Build Prowler package
|
||||
run: |
|
||||
|
||||
@@ -4,7 +4,7 @@ name: SDK - Refresh AWS services' regions
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 9 * * *" #runs at 09:00 UTC everyday
|
||||
- cron: "0 9 * * 1" # runs at 09:00 UTC every Monday
|
||||
|
||||
env:
|
||||
GITHUB_BRANCH: "master"
|
||||
@@ -23,12 +23,12 @@ jobs:
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ env.GITHUB_BRANCH }}
|
||||
|
||||
- name: setup python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: 3.9 #install the python needed
|
||||
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
pip install boto3
|
||||
|
||||
- name: Configure AWS Credentials -- DEV
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_DEV }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
@@ -50,12 +50,13 @@ jobs:
|
||||
|
||||
# Create pull request
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
commit-message: "feat(regions_update): Update regions for AWS services"
|
||||
branch: "aws-services-regions-updated-${{ github.sha }}"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws"
|
||||
title: "chore(regions_update): Changes in regions for AWS services"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
@@ -30,6 +30,7 @@ env:
|
||||
# Container Registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
|
||||
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
|
||||
|
||||
jobs:
|
||||
repository-check:
|
||||
@@ -61,38 +62,60 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
run: |
|
||||
shortSha=$(git rev-parse --short ${{ github.sha }})
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
# Set push: false for testing
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
event-type: prowler-ui-deploy
|
||||
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'
|
||||
|
||||
6
.github/workflows/ui-codeql.yml
vendored
@@ -44,16 +44,16 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/ui-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
with:
category: "/language:${{matrix.language}}"
76
.github/workflows/ui-pull-request.yml
vendored
@@ -6,6 +6,7 @@ on:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
paths:
|
||||
- ".github/workflows/ui-pull-request.yml"
|
||||
- "ui/**"
|
||||
pull_request:
|
||||
branches:
|
||||
@@ -13,6 +14,9 @@ on:
|
||||
- "v5.*"
|
||||
paths:
|
||||
- 'ui/**'
|
||||
env:
|
||||
UI_WORKING_DIR: ./ui
|
||||
IMAGE_NAME: prowler-ui
|
||||
|
||||
jobs:
|
||||
test-and-coverage:
|
||||
@@ -23,19 +27,85 @@ jobs:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install dependencies
|
||||
working-directory: ./ui
|
||||
run: npm install
|
||||
run: npm ci
|
||||
- name: Run Healthcheck
|
||||
working-directory: ./ui
|
||||
run: npm run healthcheck
|
||||
- name: Build the application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
|
||||
e2e-tests:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
AUTH_SECRET: 'fallback-ci-secret-for-testing'
|
||||
AUTH_TRUST_HOST: true
|
||||
NEXTAUTH_URL: http://localhost:3000
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install dependencies
|
||||
working-directory: ./ui
|
||||
run: npm ci
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-
|
||||
- name: Install Playwright browsers
|
||||
working-directory: ./ui
|
||||
if: steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: npm run test:e2e:install
|
||||
- name: Build the application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
- name: Run Playwright tests
|
||||
working-directory: ./ui
|
||||
run: npm run test:e2e
|
||||
- name: Upload Playwright report
|
||||
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
|
||||
if: failure()
|
||||
with:
|
||||
name: playwright-report
|
||||
path: ui/playwright-report/
|
||||
retention-days: 30
|
||||
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
- name: Build Container
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.UI_WORKING_DIR }}
|
||||
# Always build using `prod` target
|
||||
target: prod
|
||||
push: false
|
||||
tags: ${{ env.IMAGE_NAME }}:latest
|
||||
outputs: type=docker
|
||||
build-args: |
|
||||
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
|
||||
|
||||
17
.gitignore
vendored
@@ -31,7 +31,7 @@ tags
*.DS_Store

# Prowler output
output/
/output

# Prowler found secrets
secrets-*/
@@ -42,13 +42,28 @@ junit-reports/
# VSCode files
.vscode/

# Cursor files
.cursorignore
.cursor/

# RooCode files
.roo/
.rooignore
.roomodes

# Cline files
.cline/
.clineignore

# Terraform
.terraform*
*.tfstate
*.tfstate.*

# .env
ui/.env*
api/.env*
.env.local

# Coverage
.coverage*
@@ -27,6 +27,7 @@ repos:
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
exclude: contrib
|
||||
|
||||
## PYTHON
|
||||
- repo: https://github.com/myint/autoflake
|
||||
rev: v2.3.1
|
||||
@@ -58,11 +59,28 @@ repos:
|
||||
args: ["--ignore=E266,W503,E203,E501,W605"]
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 1.8.0
|
||||
rev: 2.1.1
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
name: API - poetry-check
|
||||
args: ["--directory=./api"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-lock
|
||||
args: ["--no-update"]
|
||||
name: API - poetry-lock
|
||||
args: ["--directory=./api"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-check
|
||||
name: SDK - poetry-check
|
||||
args: ["--directory=./"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-lock
|
||||
name: SDK - poetry-lock
|
||||
args: ["--directory=./"]
|
||||
pass_filenames: false
|
||||
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: v2.13.0-beta
|
||||
@@ -97,7 +115,7 @@ repos:
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
entry: bash -c 'safety check --ignore 70612,66963'
|
||||
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
|
||||
61
Dockerfile
@@ -1,38 +1,61 @@
|
||||
FROM python:3.12.8-alpine3.20
|
||||
FROM python:3.12.11-slim-bookworm AS build
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/prowler"
|
||||
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
|
||||
|
||||
# Update system dependencies and install essential tools
|
||||
#hadolint ignore=DL3018
|
||||
RUN apk --no-cache upgrade && apk --no-cache add curl git
|
||||
ARG POWERSHELL_VERSION=7.5.0
|
||||
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install PowerShell
|
||||
RUN ARCH=$(uname -m) && \
|
||||
if [ "$ARCH" = "x86_64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
elif [ "$ARCH" = "aarch64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
else \
|
||||
echo "Unsupported architecture: $ARCH" && exit 1 ; \
|
||||
fi && \
|
||||
mkdir -p /opt/microsoft/powershell/7 && \
|
||||
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
|
||||
chmod +x /opt/microsoft/powershell/7/pwsh && \
|
||||
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
|
||||
rm /tmp/powershell.tar.gz
|
||||
|
||||
# Add prowler user
|
||||
RUN addgroup --gid 1000 prowler && \
|
||||
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
|
||||
|
||||
# Create non-root user
|
||||
RUN mkdir -p /home/prowler && \
|
||||
echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
|
||||
echo 'prowler:x:1000:' > /etc/group && \
|
||||
chown -R prowler:prowler /home/prowler
|
||||
USER prowler
|
||||
|
||||
# Copy necessary files
|
||||
WORKDIR /home/prowler
|
||||
|
||||
# Copy necessary files
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY dashboard/ /home/prowler/dashboard/
|
||||
COPY pyproject.toml /home/prowler
|
||||
COPY README.md /home/prowler
|
||||
COPY README.md /home/prowler/
|
||||
COPY prowler/providers/m365/lib/powershell/m365_powershell.py /home/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
|
||||
# Install Python dependencies
|
||||
ENV HOME='/home/prowler'
|
||||
ENV PATH="$HOME/.local/bin:$PATH"
|
||||
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
|
||||
pip install --no-cache-dir .
|
||||
ENV PATH="${HOME}/.local/bin:${PATH}"
|
||||
#hadolint ignore=DL3013
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
RUN poetry install --compile && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
# Install PowerShell modules
|
||||
RUN poetry run python prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
|
||||
# Remove deprecated dash dependencies
|
||||
RUN pip uninstall dash-html-components -y && \
|
||||
pip uninstall dash-core-components -y
|
||||
|
||||
# Remove Prowler directory and build files
|
||||
USER 0
|
||||
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
|
||||
|
||||
USER prowler
|
||||
ENTRYPOINT ["prowler"]
|
||||
ENTRYPOINT ["poetry", "run", "prowler"]
|
||||
|
||||
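The new Debian-based build stage and Poetry entrypoint can be smoke-tested with a local build; the image tag below is arbitrary and the `-v` flag matches the version check shown in the README.

```bash
# Build from the repository root and confirm the "poetry run prowler" entrypoint works.
docker build -t prowler-cli-test .
docker run --rm prowler-cli-test -v
```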
188
README.md
@@ -3,7 +3,7 @@
|
||||
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
|
||||
</p>
|
||||
<p align="center">
|
||||
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
|
||||
<b><i>Prowler</b> is the Open Cloud Security platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
|
||||
</p>
|
||||
<p align="center">
|
||||
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
|
||||
@@ -43,15 +43,29 @@
|
||||
|
||||
# Description
|
||||
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
|
||||
**Prowler** is an open-source security tool designed to assess and enforce security best practices across AWS, Azure, Google Cloud, and Kubernetes. It supports tasks such as security audits, incident response, continuous monitoring, system hardening, forensic readiness, and remediation processes.
|
||||
|
||||
Prowler includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:
|
||||
|
||||
- **Industry Standards:** CIS, NIST 800, NIST CSF, and CISA
|
||||
- **Regulatory Compliance and Governance:** RBI, FedRAMP, and PCI-DSS
|
||||
- **Frameworks for Sensitive Data and Privacy:** GDPR, HIPAA, and FFIEC
|
||||
- **Frameworks for Organizational Governance and Quality Control:** SOC2 and GXP
|
||||
- **AWS-Specific Frameworks:** AWS Foundational Technical Review (FTR) and AWS Well-Architected Framework (Security Pillar)
|
||||
- **National Security Standards:** ENS (Spanish National Security Scheme)
|
||||
- **Custom Security Frameworks:** Tailored to your needs
|
||||
|
||||
## Prowler CLI and Prowler Cloud
|
||||
|
||||
Prowler offers a Command Line Interface (CLI), known as Prowler Open Source, and an additional service built on top of it, called <a href="https://prowler.com">Prowler Cloud</a>.
|
||||
|
||||
## Prowler App
|
||||
|
||||
Prowler App is a web application that allows you to run Prowler in your cloud provider accounts and visualize the results in a user-friendly interface.
|
||||
Prowler App is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.
|
||||
|
||||

|
||||
|
||||
>More details at [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
|
||||
>For more details, refer to the [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
@@ -60,6 +74,7 @@ prowler <provider>
|
||||
```
|
||||

|
||||
|
||||
|
||||
## Prowler Dashboard
|
||||
|
||||
```console
|
||||
@@ -67,22 +82,34 @@ prowler dashboard
|
||||
```
|
||||

|
||||
|
||||
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.
|
||||
# Prowler at a Glance
|
||||
|
||||
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|
||||
|---|---|---|---|---|
|
||||
| AWS | 561 | 81 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
|
||||
| GCP | 77 | 13 -> `prowler gcp --list-services` | 4 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
|
||||
| Azure | 139 | 18 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
|
||||
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
|
||||
| AWS | 567 | 82 | 36 | 10 |
|
||||
| GCP | 79 | 13 | 10 | 3 |
|
||||
| Azure | 142 | 18 | 10 | 3 |
|
||||
| Kubernetes | 83 | 7 | 5 | 7 |
|
||||
| GitHub | 16 | 2 | 1 | 0 |
|
||||
| M365 | 69 | 7 | 3 | 2 |
|
||||
| NHN (Unofficial) | 6 | 2 | 1 | 0 |
|
||||
|
||||
> [!Note]
|
||||
> The numbers in the table are updated periodically.
|
||||
|
||||
> [!Tip]
|
||||
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).
|
||||
|
||||
> [!Note]
|
||||
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories: `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
|
||||
|
||||
# 💻 Installation
|
||||
|
||||
## Prowler App
|
||||
|
||||
Prowler App can be installed in different ways, depending on your environment:
|
||||
Prowler App offers flexible installation methods tailored to various environments:
|
||||
|
||||
> See how to use Prowler App in the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
|
||||
> For detailed instructions on using Prowler App, refer to the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
|
||||
|
||||
### Docker Compose
|
||||
|
||||
@@ -98,15 +125,31 @@ curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/mast
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
> Containers are built for `linux/amd64`.
|
||||
|
||||
### Configuring Your Workstation for Prowler App
|
||||
|
||||
If your workstation's architecture is incompatible, you can resolve this by:
|
||||
|
||||
- **Setting the environment variable**: `DOCKER_DEFAULT_PLATFORM=linux/amd64`
|
||||
- **Using the following flag in your Docker command**: `--platform linux/amd64`
|
||||
|
||||
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
|
||||
|
||||
### Common Issues with Docker Pull Installation
|
||||
|
||||
> [!Note]
|
||||
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.
|
||||
|
||||
You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.
|
||||
|
||||
|
||||
### From GitHub
|
||||
|
||||
**Requirements**
|
||||
|
||||
* `git` installed.
|
||||
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
|
||||
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
|
||||
|
||||
@@ -116,7 +159,7 @@ docker compose up -d
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler/api
|
||||
poetry install
|
||||
poetry shell
|
||||
eval $(poetry env activate)
|
||||
set -a
|
||||
source .env
|
||||
docker compose up postgres valkey -d
|
||||
@@ -124,8 +167,13 @@ cd src/backend
|
||||
python manage.py migrate --database admin
|
||||
gunicorn -c config/guniconf.py config.wsgi:application
|
||||
```
|
||||
> [!IMPORTANT]
|
||||
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
|
||||
>
|
||||
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
|
||||
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
|
||||
|
||||
> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.
|
||||
> After completing the setup, access the API documentation at http://localhost:8080/api/v1/docs.
|
||||
|
||||
**Commands to run the API Worker**
|
||||
|
||||
@@ -133,7 +181,7 @@ gunicorn -c config/guniconf.py config.wsgi:application
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler/api
|
||||
poetry install
|
||||
poetry shell
|
||||
eval $(poetry env activate)
|
||||
set -a
|
||||
source .env
|
||||
cd src/backend
|
||||
@@ -146,7 +194,7 @@ python -m celery -A config.celery worker -l info -E
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler/api
|
||||
poetry install
|
||||
poetry shell
|
||||
eval $(poetry env activate)
|
||||
set -a
|
||||
source .env
|
||||
cd src/backend
|
||||
@@ -163,29 +211,31 @@ npm run build
|
||||
npm start
|
||||
```
|
||||
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
|
||||
|
||||
## Prowler CLI
|
||||
### Pip package
|
||||
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
|
||||
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/). Consequently, it can be installed using pip with Python >3.9.1, <3.13:
|
||||
|
||||
```console
|
||||
pip install prowler
|
||||
prowler -v
|
||||
```
|
||||
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
|
||||
>For further guidance, refer to [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
|
||||
|
||||
### Containers
|
||||
|
||||
The available versions of Prowler CLI are the following:
|
||||
**Available Versions of Prowler CLI**
|
||||
|
||||
- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
|
||||
- `v4-latest`: in sync with `v4` branch (bear in mind that it is not a stable version)
|
||||
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
|
||||
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
|
||||
- `stable`: this tag always point to the latest release.
|
||||
- `v4-stable`: this tag always point to the latest release for v4.
|
||||
- `v3-stable`: this tag always point to the latest release for v3.
|
||||
The following versions of Prowler CLI are available, depending on your requirements:
|
||||
|
||||
- `latest`: Synchronizes with the `master` branch. Note that this version is not stable.
|
||||
- `v4-latest`: Synchronizes with the `v4` branch. Note that this version is not stable.
|
||||
- `v3-latest`: Synchronizes with the `v3` branch. Note that this version is not stable.
|
||||
- `<x.y.z>` (release): Stable releases corresponding to specific versions. You can find the complete list of releases [here](https://github.com/prowler-cloud/prowler/releases).
|
||||
- `stable`: Always points to the latest release.
|
||||
- `v4-stable`: Always points to the latest release for v4.
|
||||
- `v3-stable`: Always points to the latest release for v3.
|
||||
|
||||
The container images are available here:
|
||||
- Prowler CLI:
|
||||
@@ -197,29 +247,56 @@ The container images are available here:
|
||||
|
||||
### From GitHub
|
||||
|
||||
Python >= 3.9, < 3.13 is required with pip and poetry:
|
||||
Python >3.9.1, <3.13 is required with pip and Poetry:
|
||||
|
||||
``` console
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
poetry shell
|
||||
eval $(poetry env activate)
|
||||
poetry install
|
||||
python prowler.py -v
|
||||
python prowler-cli.py -v
|
||||
```
|
||||
> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
|
||||
# 📐✏️ High level architecture
|
||||
> [!IMPORTANT]
|
||||
> To clone Prowler on Windows, configure Git to support long file paths by running the following command: `git config core.longpaths true`.
|
||||
|
||||
> [!IMPORTANT]
|
||||
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
|
||||
>
|
||||
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
|
||||
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
|
||||
|
||||
# ✏️ High level architecture
|
||||
|
||||
## Prowler App
|
||||
The **Prowler App** consists of three main components:
|
||||
**Prowler App** is composed of three key components:
|
||||
|
||||
- **Prowler UI**: A user-friendly web interface for running Prowler and viewing results, powered by Next.js.
|
||||
- **Prowler API**: The backend API that executes Prowler scans and stores the results, built with Django REST Framework.
|
||||
- **Prowler SDK**: A Python SDK that integrates with the Prowler CLI for advanced functionality.
|
||||
- **Prowler UI**: A web-based interface, built with Next.js, providing a user-friendly experience for executing Prowler scans and visualizing results.
|
||||
- **Prowler API**: A backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
|
||||
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
|
||||
|
||||

|
||||
|
||||
## Prowler CLI
|
||||
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
|
||||
|
||||
**Running Prowler**
|
||||
|
||||
Prowler can be executed across various environments, offering flexibility to meet your needs. It can be run from:
|
||||
|
||||
- Your own workstation
|
||||
|
||||
- A Kubernetes Job
|
||||
|
||||
- Google Compute Engine
|
||||
|
||||
- Azure Virtual Machines (VMs)
|
||||
|
||||
- Amazon EC2 instances
|
||||
|
||||
- AWS Fargate or other container platforms
|
||||
|
||||
- CloudShell
|
||||
|
||||
And many more environments.
|
||||
|
||||

|
||||
|
||||
@@ -227,23 +304,36 @@ You can run Prowler from your workstation, a Kubernetes Job, a Google Compute En
|
||||
|
||||
## General
|
||||
- `Allowlist` now is called `Mutelist`.
|
||||
- The `--quiet` option has been deprecated, now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
|
||||
- All `INFO` finding's status has changed to `MANUAL`.
|
||||
- The CSV output format is common for all the providers.
|
||||
- The `--quiet` option has been deprecated. Use the `--status` flag to filter findings based on their status: PASS, FAIL, or MANUAL.
|
||||
- All findings with an `INFO` status have been reclassified as `MANUAL`.
|
||||
- The CSV output format is standardized across all providers.
|
||||
|
||||
We have deprecated some of our outputs formats:
|
||||
- The native JSON is replaced for the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.
|
||||
**Deprecated Output Formats**
|
||||
|
||||
The following formats are now deprecated:
|
||||
- Native JSON has been replaced with JSON in [OCSF] v1.1.0 format, which is standardized across all providers (https://schema.ocsf.io/).
|
||||
|
||||
## AWS
|
||||
- Deprecate the AWS flag --sts-endpoint-region since we use AWS STS regional tokens.
|
||||
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.
|
||||
|
||||
**AWS Flag Deprecation**
|
||||
|
||||
The flag --sts-endpoint-region has been deprecated due to the adoption of AWS STS regional tokens.
|
||||
|
||||
**Sending FAIL Results to AWS Security Hub**
|
||||
|
||||
- To send only FAILS to AWS Security Hub, use one of the following options: `--send-sh-only-fails` or `--security-hub --status FAIL`.
|
||||
|
||||
|
||||
# 📖 Documentation
|
||||
|
||||
Install, Usage, Tutorials and Developer Guide is at https://docs.prowler.com/
|
||||
**Documentation Resources**
|
||||
|
||||
For installation instructions, usage details, tutorials, and the Developer Guide, visit https://docs.prowler.com/
|
||||
|
||||
# 📃 License
|
||||
|
||||
Prowler is licensed as Apache License 2.0 as specified in each file. You may obtain a copy of the License at
|
||||
<http://www.apache.org/licenses/LICENSE-2.0>
|
||||
**Prowler License Information**
|
||||
|
||||
Prowler is licensed under the Apache License 2.0, as indicated in each file within the repository. Obtaining a Copy of the License
|
||||
|
||||
A copy of the License is available at <http://www.apache.org/licenses/LICENSE-2.0>
|
||||
|
||||
@@ -23,6 +23,7 @@ DJANGO_SECRETS_ENCRYPTION_KEY=""
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=

# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
@@ -39,3 +40,19 @@ POSTGRES_DB=prowler_db
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0

# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

# Social login credentials
DJANGO_GOOGLE_OAUTH_CLIENT_ID=""
DJANGO_GOOGLE_OAUTH_CLIENT_SECRET=""
DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""

DJANGO_GITHUB_OAUTH_CLIENT_ID=""
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""

# Deletion Task Batch Size
DJANGO_DELETION_BATCH_SIZE=5000
api/.gitignore
@@ -1,168 +0,0 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
/_data/
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
||||
.pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
*.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
.idea/
|
||||
|
||||
# VSCode
|
||||
.vscode/
|
||||
@@ -1,91 +0,0 @@
|
||||
repos:
|
||||
## GENERAL
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
args: ["--unsafe"]
|
||||
- id: check-json
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- id: no-commit-to-branch
|
||||
- id: pretty-format-json
|
||||
args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
|
||||
exclude: 'src/backend/api/fixtures/dev/.*\.json$'
|
||||
|
||||
## TOML
|
||||
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
||||
rev: v2.13.0
|
||||
hooks:
|
||||
- id: pretty-format-toml
|
||||
args: [--autofix]
|
||||
files: pyproject.toml
|
||||
|
||||
## BASH
|
||||
- repo: https://github.com/koalaman/shellcheck-precommit
|
||||
rev: v0.10.0
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
exclude: contrib
|
||||
## PYTHON
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.5.0
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 1.8.0
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
args: ["--directory=src"]
|
||||
- id: poetry-lock
|
||||
args: ["--no-update", "--directory=src"]
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: v2.13.0-beta
|
||||
hooks:
|
||||
- id: hadolint
|
||||
args: ["--ignore=DL3013", "Dockerfile"]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: trufflehog
|
||||
name: TruffleHog
|
||||
description: Detect secrets in your data.
|
||||
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
|
||||
# For running trufflehog in docker, use the following entry instead:
|
||||
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
|
||||
language: system
|
||||
stages: ["commit", "push"]
|
||||
|
||||
- id: bandit
|
||||
name: bandit
|
||||
description: "Bandit is a tool for finding common security issues in Python code"
|
||||
entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
entry: bash -c 'poetry run safety check --ignore 70612,66963'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
name: vulture
|
||||
description: "Vulture finds unused code in Python programs."
|
||||
entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
api/CHANGELOG.md
@@ -0,0 +1,207 @@
|
||||
# Prowler API Changelog
|
||||
|
||||
All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
## [1.10.2] (Prowler v5.9.2)
|
||||
|
||||
### Changed
|
||||
- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)
|
||||
|
||||
---
|
||||
|
||||
## [v1.10.1] (Prowler v5.9.1)
|
||||
|
||||
### Fixed
|
||||
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
|
||||
|
||||
---
|
||||
|
||||
## [v1.10.0] (Prowler v5.9.0)
|
||||
|
||||
### Added
|
||||
- SSO with SAML support [(#8175)](https://github.com/prowler-cloud/prowler/pull/8175)
|
||||
- `GET /resources/metadata`, `GET /resources/metadata/latest` and `GET /resources/latest` to expose resource metadata and latest scan results [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
|
||||
### Changed
|
||||
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow to mute findings.
|
||||
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
|
||||
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
|
||||
|
||||
### Fixed
|
||||
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
- RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)
|
||||
|
||||
### Changed
|
||||
- `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)
|
||||
|
||||
### Security
|
||||
- Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.1] (Prowler v5.8.1)
|
||||
|
||||
### Added
|
||||
- Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)
|
||||
|
||||
### Changed
|
||||
- Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)
|
||||
|
||||
### Fixed
|
||||
- Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
|
||||
- Invitation email comparison case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)
|
||||
|
||||
### Removed
|
||||
- Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.0] (Prowler v5.8.0)
|
||||
|
||||
### Added
|
||||
- Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
|
||||
- `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)
|
||||
|
||||
### Changed
|
||||
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)
|
||||
|
||||
### Fixed
|
||||
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.5] (Prowler v5.7.5)
|
||||
|
||||
### Fixed
|
||||
- Normalize provider UID to ensure safe and unique export directory paths [(#8007)](https://github.com/prowler-cloud/prowler/pull/8007).
|
||||
- Blank resource types in `/metadata` endpoints [(#8027)](https://github.com/prowler-cloud/prowler/pull/8027)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.4] (Prowler v5.7.4)
|
||||
|
||||
### Removed
|
||||
- Reverted RLS transaction handling and DB custom backend [(#7994)](https://github.com/prowler-cloud/prowler/pull/7994)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.3] (Prowler v5.7.3)
|
||||
|
||||
### Added
|
||||
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935)
|
||||
|
||||
### Changed
|
||||
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)
|
||||
|
||||
### Fixed
|
||||
- Transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916)
|
||||
- Reverted the change `get_with_retry` to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.2] (Prowler v5.7.2)
|
||||
|
||||
### Fixed
|
||||
- Task lookup to use task_kwargs instead of task_args for scan report resolution [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
|
||||
- Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
|
||||
- Connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
|
||||
- Race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876)
|
||||
- Error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.1] (Prowler v5.7.1)
|
||||
|
||||
### Fixed
|
||||
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.0] (Prowler v5.7.0)
|
||||
|
||||
### Added
|
||||
- Huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- New endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743)
|
||||
- Export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
|
||||
|
||||
---
|
||||
|
||||
## [v1.7.0] (Prowler v5.6.0)
|
||||
|
||||
### Added
|
||||
|
||||
- M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563)
|
||||
- `compliance/` folder and ZIP‐export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
- API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
|
||||
---
|
||||
|
||||
## [v1.6.0] (Prowler v5.5.0)
|
||||
|
||||
### Added
|
||||
|
||||
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167)
|
||||
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289)
|
||||
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333)
|
||||
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378)
|
||||
- Missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.4] (Prowler v5.4.4)
|
||||
|
||||
### Fixed
|
||||
- Bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.3] (Prowler v5.4.3)
|
||||
|
||||
### Fixed
|
||||
- Duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401)
|
||||
- Environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.2] (Prowler v5.4.2)
|
||||
|
||||
### Changed
|
||||
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.1] (Prowler v5.4.1)
|
||||
|
||||
### Fixed
|
||||
- Handle response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183)
|
||||
- Race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172)
|
||||
- Handle exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.0] (Prowler v5.4.0)
|
||||
|
||||
### Added
|
||||
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
|
||||
- API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878)
|
||||
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)
|
||||
|
||||
### Changed
|
||||
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019)
|
||||
|
||||
---
|
||||
|
||||
## [v1.4.0] (Prowler v5.3.0)
|
||||
|
||||
### Changed
|
||||
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700)
|
||||
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800)
|
||||
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863)
|
||||
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869)
|
||||
|
||||
---
|
||||
@@ -1,13 +1,45 @@
|
||||
FROM python:3.12.8-alpine3.20 AS build
|
||||
FROM python:3.12.10-slim-bookworm AS build
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/api"
|
||||
|
||||
# hadolint ignore=DL3018
|
||||
RUN apk --no-cache add gcc python3-dev musl-dev linux-headers curl-dev
|
||||
ARG POWERSHELL_VERSION=7.5.0
|
||||
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}
|
||||
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget \
|
||||
libicu72 \
|
||||
gcc \
|
||||
g++ \
|
||||
make \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
pkg-config \
|
||||
libtool \
|
||||
libxslt1-dev \
|
||||
python3-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install PowerShell
|
||||
RUN ARCH=$(uname -m) && \
|
||||
if [ "$ARCH" = "x86_64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
elif [ "$ARCH" = "aarch64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
else \
|
||||
echo "Unsupported architecture: $ARCH" && exit 1 ; \
|
||||
fi && \
|
||||
mkdir -p /opt/microsoft/powershell/7 && \
|
||||
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
|
||||
chmod +x /opt/microsoft/powershell/7/pwsh && \
|
||||
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
|
||||
rm /tmp/powershell.tar.gz
|
||||
|
||||
# Add prowler user
|
||||
RUN addgroup --gid 1000 prowler && \
|
||||
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
|
||||
|
||||
RUN apk --no-cache upgrade && \
|
||||
addgroup -g 1000 prowler && \
|
||||
adduser -D -u 1000 -G prowler prowler
|
||||
USER prowler
|
||||
|
||||
WORKDIR /home/prowler
|
||||
@@ -17,27 +49,22 @@ COPY pyproject.toml ./
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
|
||||
ENV PATH="/home/prowler/.local/bin:$PATH"
|
||||
|
||||
RUN poetry install && \
|
||||
# Add `--no-root` to avoid installing the current project as a package
|
||||
RUN poetry install --no-root && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
COPY docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
|
||||
WORKDIR /home/prowler/backend
|
||||
|
||||
# Development image
|
||||
# hadolint ignore=DL3006
|
||||
FROM build AS dev
|
||||
|
||||
USER 0
|
||||
# hadolint ignore=DL3018
|
||||
RUN apk --no-cache add curl vim
|
||||
|
||||
USER prowler
|
||||
|
||||
ENTRYPOINT ["../docker-entrypoint.sh", "dev"]
|
||||
|
||||
# Production image
|
||||
|
||||
@@ -235,6 +235,7 @@ To view the logs for any component (e.g., Django, Celery worker), you can use th

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```

## Applying migrations

@@ -256,7 +257,7 @@ cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```

> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`
> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`

## Run tests

@@ -269,3 +270,66 @@ poetry shell
cd src/backend
pytest
```

# Custom commands

Django provides a way to create custom commands that can be run from the command line.

> These commands can be found in: ```prowler/api/src/backend/api/management/commands```

To run a custom command, you need to be in the `prowler/api/src/backend` directory and run:

```console
poetry shell
python manage.py <command_name>
```

## Generate dummy data

```console
python manage.py findings --tenant <TENANT_ID> --findings <NUM_FINDINGS> --resources <NUM_RESOURCES> --batch <TRANSACTION_BATCH_SIZE> --alias <ALIAS>
```

This command creates, for a given tenant, a provider, a scan, and a set of related findings and resources.

> Scan progress and state are updated in real time.
> - 0-33%: Create resources.
> - 33-66%: Create findings.
> - 66%: Create resource-finding mapping.
>
> The last step is required to access the findings details, since the UI needs that to print all the information.

### Example

```console
~/backend $ poetry run python manage.py findings --tenant fffb1893-3fc7-4623-a5d9-fae47da1c528 --findings 25000 --resources 1000 --batch 5000 --alias test-script

Starting data population
Tenant: fffb1893-3fc7-4623-a5d9-fae47da1c528
Alias: test-script
Resources: 1000
Findings: 25000
Batch size: 5000

Creating resources...
100%|███████████████████████| 1/1 [00:00<00:00, 7.72it/s]
Resources created successfully.

Creating findings...
100%|███████████████████████| 5/5 [00:05<00:00, 1.09s/it]
Findings created successfully.

Creating resource-finding mappings...
100%|███████████████████████| 5/5 [00:02<00:00, 1.81it/s]
Resource-finding mappings created successfully.

Successfully populated test data.
```

@@ -1,125 +0,0 @@
|
||||
services:
|
||||
api:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-api
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
|
||||
profiles:
|
||||
- prod
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "prod"
|
||||
|
||||
api-dev:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
target: dev
|
||||
image: prowler-api-dev
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=config.django.devel
|
||||
- DJANGO_LOGGING_FORMATTER=human_readable
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
|
||||
volumes:
|
||||
- "./src/backend:/home/prowler/backend"
|
||||
- "./pyproject.toml:/home/prowler/pyproject.toml"
|
||||
profiles:
|
||||
- dev
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "dev"
|
||||
|
||||
postgres:
|
||||
image: postgres:16.3-alpine
|
||||
ports:
|
||||
- "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
|
||||
hostname: "postgres-db"
|
||||
volumes:
|
||||
- ./_data/postgres:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
|
||||
- POSTGRES_DB=${POSTGRES_DB:-prowler_db}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine3.19
|
||||
ports:
|
||||
- "${VALKEY_PORT:-6379}:6379"
|
||||
hostname: "valkey"
|
||||
volumes:
|
||||
- ./_data/valkey:/data
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
worker:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-worker
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
profiles:
|
||||
- dev
|
||||
- prod
|
||||
depends_on:
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "worker"
|
||||
|
||||
worker-beat:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-worker
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
profiles:
|
||||
- dev
|
||||
- prod
|
||||
depends_on:
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "beat"
|
||||
@@ -3,6 +3,10 @@
|
||||
|
||||
apply_migrations() {
|
||||
echo "Applying database migrations..."
|
||||
|
||||
# Fix Inconsistent migration history after adding sites app
|
||||
poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin
|
||||
|
||||
poetry run python manage.py migrate --database admin
|
||||
}
|
||||
|
||||
@@ -28,7 +32,7 @@ start_prod_server() {
|
||||
|
||||
start_worker() {
|
||||
echo "Starting the worker..."
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview -E --max-tasks-per-child 1
|
||||
}
|
||||
|
||||
start_worker_beat() {
|
||||
|
||||
api/poetry.lock (file diff suppressed because it is too large)
@@ -2,43 +2,54 @@
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["poetry-core"]
|
||||
|
||||
[tool.poetry]
|
||||
authors = ["Prowler Team"]
|
||||
[project]
|
||||
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
|
||||
dependencies = [
|
||||
"celery[pytest] (>=5.4.0,<6.0.0)",
|
||||
"dj-rest-auth[with_social,jwt] (==7.0.1)",
|
||||
"django==5.1.10",
|
||||
"django-allauth[saml] (>=65.8.0,<66.0.0)",
|
||||
"django-celery-beat (>=2.7.0,<3.0.0)",
|
||||
"django-celery-results (>=2.5.1,<3.0.0)",
|
||||
"django-cors-headers==4.4.0",
|
||||
"django-environ==0.11.2",
|
||||
"django-filter==24.3",
|
||||
"django-guid==3.5.0",
|
||||
"django-postgres-extra (>=2.0.8,<3.0.0)",
|
||||
"djangorestframework==3.15.2",
|
||||
"djangorestframework-jsonapi==7.0.2",
|
||||
"djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
|
||||
"drf-nested-routers (>=0.94.1,<1.0.0)",
|
||||
"drf-spectacular==0.27.2",
|
||||
"drf-spectacular-jsonapi==0.5.1",
|
||||
"gunicorn==23.0.0",
|
||||
"lxml==5.3.2",
|
||||
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.9",
|
||||
"psycopg2-binary==2.9.9",
|
||||
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
|
||||
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
|
||||
"uuid6==2024.7.10",
|
||||
"openai (>=1.82.0,<2.0.0)",
|
||||
"xmlsec==1.3.14"
|
||||
]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "Apache-2.0"
|
||||
name = "prowler-api"
|
||||
package-mode = false
|
||||
version = "1.1.0"
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.10.2"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
celery = {extras = ["pytest"], version = "^5.4.0"}
|
||||
django = "5.1.4"
|
||||
django-celery-beat = "^2.7.0"
|
||||
django-celery-results = "^2.5.1"
|
||||
django-cors-headers = "4.4.0"
|
||||
django-environ = "0.11.2"
|
||||
django-filter = "24.3"
|
||||
django-guid = "3.5.0"
|
||||
django-postgres-extra = "^2.0.8"
|
||||
djangorestframework = "3.15.2"
|
||||
djangorestframework-jsonapi = "7.0.2"
|
||||
djangorestframework-simplejwt = "^5.3.1"
|
||||
drf-nested-routers = "^0.94.1"
|
||||
drf-spectacular = "0.27.2"
|
||||
drf-spectacular-jsonapi = "0.5.1"
|
||||
gunicorn = "23.0.0"
|
||||
prowler = "^5.0"
|
||||
psycopg2-binary = "2.9.9"
|
||||
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
|
||||
# Needed for prowler compatibility
|
||||
python = ">=3.11,<3.13"
|
||||
uuid6 = "2024.7.10"
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
bandit = "1.7.9"
|
||||
coverage = "7.5.4"
|
||||
django-silk = "5.3.2"
|
||||
docker = "7.1.0"
|
||||
freezegun = "1.5.1"
|
||||
marshmallow = ">=3.15.0,<4.0.0"
|
||||
mypy = "1.10.1"
|
||||
pylint = "3.2.5"
|
||||
pytest = "8.2.2"
|
||||
@@ -49,7 +60,5 @@ pytest-randomly = "3.15.0"
|
||||
pytest-xdist = "3.6.1"
|
||||
ruff = "0.5.0"
|
||||
safety = "3.2.9"
|
||||
tqdm = "4.67.1"
|
||||
vulture = "2.14"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
api/src/backend/api/adapters.py
@@ -0,0 +1,71 @@
|
||||
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
|
||||
from django.db import transaction
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import Membership, Role, Tenant, User, UserRoleRelationship
|
||||
|
||||
|
||||
class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
@staticmethod
|
||||
def get_user_by_email(email: str):
|
||||
try:
|
||||
return User.objects.get(email=email)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
def pre_social_login(self, request, sociallogin):
|
||||
# Link existing accounts with the same email address
|
||||
email = sociallogin.account.extra_data.get("email")
|
||||
if sociallogin.provider.id == "saml":
|
||||
email = sociallogin.user.email
|
||||
if email:
|
||||
existing_user = self.get_user_by_email(email)
|
||||
if existing_user:
|
||||
sociallogin.connect(request, existing_user)
|
||||
|
||||
def save_user(self, request, sociallogin, form=None):
|
||||
"""
|
||||
Called after the user data is fully populated from the provider
|
||||
and is about to be saved to the DB for the first time.
|
||||
"""
|
||||
with transaction.atomic(using=MainRouter.admin_db):
|
||||
user = super().save_user(request, sociallogin, form)
|
||||
provider = sociallogin.provider.id
|
||||
extra = sociallogin.account.extra_data
|
||||
|
||||
if provider != "saml":
|
||||
# Handle other providers (e.g., GitHub, Google)
|
||||
user.save(using=MainRouter.admin_db)
|
||||
social_account_name = extra.get("name")
|
||||
if social_account_name:
|
||||
user.name = social_account_name
|
||||
user.save(using=MainRouter.admin_db)
|
||||
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(
|
||||
name=f"{user.email.split('@')[0]} default tenant"
|
||||
)
|
||||
with rls_transaction(str(tenant.id)):
|
||||
Membership.objects.using(MainRouter.admin_db).create(
|
||||
user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
|
||||
)
|
||||
role = Role.objects.using(MainRouter.admin_db).create(
|
||||
name="admin",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=True,
|
||||
manage_account=True,
|
||||
manage_billing=True,
|
||||
manage_providers=True,
|
||||
manage_integrations=True,
|
||||
manage_scans=True,
|
||||
unlimited_visibility=True,
|
||||
)
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
else:
|
||||
request.session["saml_user_created"] = str(user.id)
|
||||
|
||||
return user
|
||||
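
The adapter above only takes effect once django-allauth is pointed at it. A minimal sketch of the corresponding settings entry (the dotted path is inferred from the new `api/adapters.py` file and is an assumption):

```python
# Django settings sketch: register the custom social account adapter with allauth
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
```
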
@@ -109,16 +109,6 @@ class BaseTenantViewset(BaseViewSet):
|
||||
pass # Tenant might not exist, handle gracefully
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
if (
|
||||
request.resolver_match.url_name != "tenant-detail"
|
||||
and request.method != "DELETE"
|
||||
):
|
||||
user_id = str(request.user.id)
|
||||
|
||||
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
# TODO: DRY this when we have time
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
@@ -126,8 +116,8 @@ class BaseTenantViewset(BaseViewSet):
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
self.request.tenant_id = tenant_id
|
||||
user_id = str(request.user.id)
|
||||
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
|
||||
|
||||
@@ -1,12 +1,38 @@
|
||||
from types import MappingProxyType
|
||||
|
||||
from api.models import Provider
|
||||
from prowler.config.config import get_available_compliance_frameworks
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.check.models import CheckMetadata
|
||||
|
||||
from api.models import Provider
|
||||
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
|
||||
PROWLER_CHECKS = {}
|
||||
AVAILABLE_COMPLIANCE_FRAMEWORKS = {}
|
||||
|
||||
|
||||
def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
|
||||
"""
|
||||
Retrieve and cache the list of available compliance frameworks for a specific cloud provider.
|
||||
|
||||
This function lazily loads and caches the available compliance frameworks (e.g., CIS, MITRE, ISO)
|
||||
for each provider type (AWS, Azure, GCP, etc.) on first access. Subsequent calls for the same
|
||||
provider will return the cached result.
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
|
||||
available compliance frameworks (e.g., "aws", "azure", "gcp", "m365").
|
||||
|
||||
Returns:
|
||||
list[str]: A list of framework identifiers (e.g., "cis_1.4_aws", "mitre_attack_azure") available
|
||||
for the given provider.
|
||||
"""
|
||||
global AVAILABLE_COMPLIANCE_FRAMEWORKS
|
||||
if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
|
||||
AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = (
|
||||
get_available_compliance_frameworks(provider_type)
|
||||
)
|
||||
|
||||
return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]
|
||||
|
||||
|
||||
def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
|
||||
@@ -164,10 +190,16 @@ def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
total_checks = len(requirement.Checks)
|
||||
checks_dict = {check: None for check in requirement.Checks}
|
||||
|
||||
req_status_val = "MANUAL" if total_checks == 0 else "PASS"
|
||||
|
||||
# Build requirement dictionary
|
||||
requirement_dict = {
|
||||
"name": requirement.Name or requirement.Id,
|
||||
"description": requirement.Description,
|
||||
"tactics": getattr(requirement, "Tactics", []),
|
||||
"subtechniques": getattr(requirement, "SubTechniques", []),
|
||||
"platforms": getattr(requirement, "Platforms", []),
|
||||
"technique_url": getattr(requirement, "TechniqueURL", ""),
|
||||
"attributes": [
|
||||
dict(attribute) for attribute in requirement.Attributes
|
||||
],
|
||||
@@ -178,20 +210,18 @@ def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
"manual": 0,
|
||||
"total": total_checks,
|
||||
},
|
||||
"status": "PASS",
|
||||
"status": req_status_val,
|
||||
}
|
||||
|
||||
# Update requirements status
|
||||
if total_checks == 0:
|
||||
# Update requirements status counts for the framework
|
||||
if req_status_val == "MANUAL":
|
||||
requirements_status["manual"] += 1
|
||||
elif req_status_val == "PASS":
|
||||
requirements_status["passed"] += 1
|
||||
|
||||
# Add requirement to compliance requirements
|
||||
compliance_requirements[requirement.Id] = requirement_dict
|
||||
|
||||
# Calculate pending requirements
|
||||
pending_requirements = total_requirements - requirements_status["manual"]
|
||||
requirements_status["passed"] = pending_requirements
|
||||
|
||||
# Build compliance dictionary
|
||||
compliance_dict = {
|
||||
"framework": compliance_data.Framework,
|
||||
|
||||
@@ -1,18 +1,29 @@
|
||||
ALLOWED_APPS = ("django", "socialaccount", "account", "authtoken", "silk")
|
||||
|
||||
|
||||
class MainRouter:
|
||||
default_db = "default"
|
||||
admin_db = "admin"
|
||||
|
||||
def db_for_read(self, model, **hints): # noqa: F841
|
||||
model_table_name = model._meta.db_table
|
||||
if model_table_name.startswith("django_"):
|
||||
if model_table_name.startswith("django_") or any(
|
||||
model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS
|
||||
):
|
||||
return self.admin_db
|
||||
return None
|
||||
|
||||
def db_for_write(self, model, **hints): # noqa: F841
|
||||
model_table_name = model._meta.db_table
|
||||
if model_table_name.startswith("django_"):
|
||||
if any(model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS):
|
||||
return self.admin_db
|
||||
return None
|
||||
|
||||
def allow_migrate(self, db, app_label, model_name=None, **hints): # noqa: F841
|
||||
return db == self.admin_db
|
||||
|
||||
def allow_relation(self, obj1, obj2, **hints): # noqa: F841
|
||||
# Allow relations if both objects are in either "default" or "admin" db connectors
|
||||
if {obj1._state.db, obj2._state.db} <= {self.default_db, self.admin_db}:
|
||||
return True
|
||||
return None
|
||||
|
||||
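
For context, a router like `MainRouter` is only consulted if it is registered in the Django settings. A minimal sketch, assuming the `api.db_router` module path used by the imports elsewhere in this diff:

```python
# Django settings sketch: register the router so Django consults
# db_for_read/db_for_write/allow_migrate when picking a connection.
# Both the "default" and "admin" aliases must exist in DATABASES.
DATABASE_ROUTERS = ["api.db_router.MainRouter"]
```
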
@@ -1,3 +1,4 @@
|
||||
import re
|
||||
import secrets
|
||||
import uuid
|
||||
from contextlib import contextmanager
|
||||
@@ -6,6 +7,7 @@ from datetime import datetime, timedelta, timezone
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import BaseUserManager
|
||||
from django.db import connection, models, transaction
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from psycopg2 import connect as psycopg2_connect
|
||||
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
@@ -105,11 +107,12 @@ def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
|
||||
return "".join(secrets.choice(symbols or _symbols) for _ in range(length))
|
||||
|
||||
|
||||
def batch_delete(queryset, batch_size=5000):
|
||||
def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_SIZE):
|
||||
"""
|
||||
Deletes objects in batches and returns the total number of deletions and a summary.
|
||||
|
||||
Args:
|
||||
tenant_id (str): Tenant ID the queryset belongs to.
|
||||
queryset (QuerySet): The queryset of objects to delete.
|
||||
batch_size (int): The number of objects to delete in each batch.
|
||||
|
||||
@@ -120,15 +123,16 @@ def batch_delete(queryset, batch_size=5000):
|
||||
deletion_summary = {}
|
||||
|
||||
while True:
|
||||
# Get a batch of IDs to delete
|
||||
batch_ids = set(
|
||||
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
|
||||
)
|
||||
if not batch_ids:
|
||||
# No more objects to delete
|
||||
break
|
||||
with rls_transaction(tenant_id, POSTGRES_TENANT_VAR):
|
||||
# Get a batch of IDs to delete
|
||||
batch_ids = set(
|
||||
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
|
||||
)
|
||||
if not batch_ids:
|
||||
# No more objects to delete
|
||||
break
|
||||
|
||||
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
|
||||
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
|
||||
|
||||
total_deleted += deleted_count
|
||||
for model_label, count in deleted_info.items():
|
||||
@@ -137,6 +141,63 @@ def batch_delete(queryset, batch_size=5000):
|
||||
return total_deleted, deletion_summary
|
||||
|
||||
|
||||
def delete_related_daily_task(provider_id: str):
|
||||
"""
|
||||
Deletes the periodic task associated with a specific provider.
|
||||
|
||||
Args:
|
||||
provider_id (str): The unique identifier for the provider
|
||||
whose related periodic task should be deleted.
|
||||
"""
|
||||
task_name = f"scan-perform-scheduled-{provider_id}"
|
||||
PeriodicTask.objects.filter(name=task_name).delete()
|
||||
|
||||
|
||||
def create_objects_in_batches(
|
||||
tenant_id: str, model, objects: list, batch_size: int = 500
|
||||
):
|
||||
"""
|
||||
Bulk-create model instances in repeated, per-tenant RLS transactions.
|
||||
|
||||
All chunks execute in their own transaction, so no single transaction
|
||||
grows too large.
|
||||
|
||||
Args:
|
||||
tenant_id (str): UUID string of the tenant under which to set RLS.
|
||||
model: Django model class whose `.objects.bulk_create()` will be called.
|
||||
objects (list): List of model instances (unsaved) to bulk-create.
|
||||
batch_size (int): Maximum number of objects per bulk_create call.
|
||||
"""
|
||||
total = len(objects)
|
||||
for i in range(0, total, batch_size):
|
||||
chunk = objects[i : i + batch_size]
|
||||
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
|
||||
model.objects.bulk_create(chunk, batch_size)
|
||||
|
||||
|
||||
def update_objects_in_batches(
|
||||
tenant_id: str, model, objects: list, fields: list, batch_size: int = 500
|
||||
):
|
||||
"""
|
||||
Bulk-update model instances in repeated, per-tenant RLS transactions.
|
||||
|
||||
All chunks execute in their own transaction, so no single transaction
|
||||
grows too large.
|
||||
|
||||
Args:
|
||||
tenant_id (str): UUID string of the tenant under which to set RLS.
|
||||
model: Django model class whose `.objects.bulk_update()` will be called.
|
||||
objects (list): List of model instances (saved) to bulk-update.
|
||||
fields (list): List of field names to update.
|
||||
batch_size (int): Maximum number of objects per bulk_update call.
|
||||
"""
|
||||
total = len(objects)
|
||||
for start in range(0, total, batch_size):
|
||||
chunk = objects[start : start + batch_size]
|
||||
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
|
||||
model.objects.bulk_update(chunk, fields, batch_size)
|
||||
|
||||
|
||||
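
A rough usage sketch of the two batch helpers above; the model, its fields, and the tenant UUID are made up for illustration, and only the helper signatures come from this diff (it would also need to run inside the Django app context):

```python
from api.db_utils import create_objects_in_batches, update_objects_in_batches
from api.models import Resource  # assumed tenant-scoped model

tenant_id = "11111111-1111-1111-1111-111111111111"  # example tenant UUID

# Each 500-object chunk is created in its own per-tenant RLS transaction
resources = [Resource(tenant_id=tenant_id, uid=f"res-{i}") for i in range(10_000)]
create_objects_in_batches(tenant_id, Resource, resources, batch_size=500)

# Update a single field on the same instances, again chunked per transaction
for resource in resources:
    resource.name = "renamed"
update_objects_in_batches(tenant_id, Resource, resources, fields=["name"], batch_size=500)
```
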
# Postgres Enums
|
||||
|
||||
|
||||
@@ -212,6 +273,167 @@ def register_enum(apps, schema_editor, enum_class): # noqa: F841
|
||||
register_adapter(enum_class, enum_adapter)
|
||||
|
||||
|
||||
def _should_create_index_on_partition(
|
||||
partition_name: str, all_partitions: bool = False
|
||||
) -> bool:
|
||||
"""
|
||||
Determine if we should create an index on this partition.
|
||||
|
||||
Args:
|
||||
partition_name: The name of the partition (e.g., "findings_2025_aug", "findings_default")
|
||||
all_partitions: If True, create on all partitions. If False, only current/future partitions.
|
||||
|
||||
Returns:
|
||||
bool: True if index should be created on this partition, False otherwise.
|
||||
"""
|
||||
if all_partitions:
|
||||
return True
|
||||
|
||||
# Extract date from partition name if it follows the pattern
|
||||
# Partition names look like: findings_2025_aug, findings_2025_jul, etc.
|
||||
date_pattern = r"(\d{4})_([a-z]{3})$"
|
||||
match = re.search(date_pattern, partition_name)
|
||||
|
||||
if not match:
|
||||
# If we can't parse the date, include it to be safe (e.g., default partition)
|
||||
return True
|
||||
|
||||
try:
|
||||
year_str, month_abbr = match.groups()
|
||||
year = int(year_str)
|
||||
|
||||
# Map month abbreviations to numbers
|
||||
month_map = {
|
||||
"jan": 1,
|
||||
"feb": 2,
|
||||
"mar": 3,
|
||||
"apr": 4,
|
||||
"may": 5,
|
||||
"jun": 6,
|
||||
"jul": 7,
|
||||
"aug": 8,
|
||||
"sep": 9,
|
||||
"oct": 10,
|
||||
"nov": 11,
|
||||
"dec": 12,
|
||||
}
|
||||
|
||||
month = month_map.get(month_abbr.lower())
|
||||
if month is None:
|
||||
# Unknown month abbreviation, include it to be safe
|
||||
return True
|
||||
|
||||
partition_date = datetime(year, month, 1, tzinfo=timezone.utc)
|
||||
|
||||
# Get current month start
|
||||
now = datetime.now(timezone.utc)
|
||||
current_month_start = now.replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
|
||||
# Include current month and future partitions
|
||||
return partition_date >= current_month_start
|
||||
|
||||
except (ValueError, TypeError):
|
||||
# If date parsing fails, include it to be safe
|
||||
return True
|
||||
|
||||
|
||||
def create_index_on_partitions(
|
||||
apps, # noqa: F841
|
||||
schema_editor,
|
||||
parent_table: str,
|
||||
index_name: str,
|
||||
columns: str,
|
||||
method: str = "BTREE",
|
||||
where: str = "",
|
||||
all_partitions: bool = True,
|
||||
):
|
||||
"""
|
||||
Create an index on existing partitions of `parent_table`.
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: A short name for the index (will be prefixed per-partition).
|
||||
columns: The parenthesized column list, e.g. "tenant_id, scan_id, status".
|
||||
method: The index method—BTREE, GIN, etc. Defaults to BTREE.
|
||||
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
|
||||
all_partitions: Whether to create indexes on all partitions or just current/future ones.
|
||||
Defaults to False (current/future only) to avoid maintenance overhead
|
||||
on old partitions where the index may not be needed.
|
||||
|
||||
Examples:
|
||||
# Create index only on current and future partitions (recommended for new indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="new_performance_idx",
|
||||
columns="tenant_id, status, severity",
|
||||
all_partitions=False # Default behavior
|
||||
)
|
||||
|
||||
# Create index on all partitions (use when migrating existing critical indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="critical_existing_idx",
|
||||
columns="tenant_id, scan_id",
|
||||
all_partitions=True
|
||||
)
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
where_sql = f" WHERE {where}" if where else ""
|
||||
for partition in partitions:
|
||||
if _should_create_index_on_partition(partition, all_partitions):
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
|
||||
f"ON {partition} USING {method} ({columns})"
|
||||
f"{where_sql};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def drop_index_on_partitions(
|
||||
apps, # noqa: F841
|
||||
schema_editor,
|
||||
parent_table: str,
|
||||
index_name: str,
|
||||
):
|
||||
"""
|
||||
Drop the per-partition indexes that were created by create_index_on_partitions.
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: The same short name used when creating them.
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
for partition in partitions:
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {idx_name};"
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
# Postgres enum definition for member role
|
||||
|
||||
|
||||
@@ -318,3 +540,27 @@ class InvitationStateEnum(EnumType):
|
||||
class InvitationStateEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("invitation_state", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Integration type
|
||||
|
||||
|
||||
class IntegrationTypeEnum(EnumType):
|
||||
enum_type_name = "integration_type"
|
||||
|
||||
|
||||
class IntegrationTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("integration_type", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Processor type
|
||||
|
||||
|
||||
class ProcessorTypeEnum(EnumType):
|
||||
enum_type_name = "processor_type"
|
||||
|
||||
|
||||
class ProcessorTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("processor_type", *args, **kwargs)
|
||||
|
||||
@@ -7,7 +7,7 @@ from rest_framework_json_api.serializers import ValidationError
|
||||
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
|
||||
|
||||
|
||||
def set_tenant(func):
|
||||
def set_tenant(func=None, *, keep_tenant=False):
|
||||
"""
|
||||
Decorator to set the tenant context for a Celery task based on the provided tenant_id.
|
||||
|
||||
@@ -40,20 +40,29 @@ def set_tenant(func):
|
||||
# The tenant context will be set before the task logic executes.
|
||||
"""
|
||||
|
||||
@wraps(func)
|
||||
@transaction.atomic
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
tenant_id = kwargs.pop("tenant_id")
|
||||
except KeyError:
|
||||
raise KeyError("This task requires the tenant_id")
|
||||
try:
|
||||
uuid.UUID(tenant_id)
|
||||
except ValueError:
|
||||
raise ValidationError("Tenant ID must be a valid UUID")
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
@transaction.atomic
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
if not keep_tenant:
|
||||
tenant_id = kwargs.pop("tenant_id")
|
||||
else:
|
||||
tenant_id = kwargs["tenant_id"]
|
||||
except KeyError:
|
||||
raise KeyError("This task requires the tenant_id")
|
||||
try:
|
||||
uuid.UUID(tenant_id)
|
||||
except ValueError:
|
||||
raise ValidationError("Tenant ID must be a valid UUID")
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
|
||||
|
||||
return func(*args, **kwargs)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
return wrapper
|
||||
|
||||
if func is None:
|
||||
return decorator
|
||||
else:
|
||||
return decorator(func)
|
||||
|
||||
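
A minimal sketch of how a Celery task could use the reworked decorator; the task names and import path are assumptions, but the two calling styles follow the code above:

```python
from celery import shared_task

from api.decorators import set_tenant  # assumed import path


@shared_task
@set_tenant
def delete_provider(provider_id: str):
    # tenant_id was popped from kwargs by the decorator before the task body runs
    ...


@shared_task
@set_tenant(keep_tenant=True)
def perform_scan(provider_id: str, tenant_id: str):
    # keep_tenant=True leaves tenant_id in kwargs so the task body can also use it
    ...
```
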
@@ -3,7 +3,7 @@ from rest_framework import status
|
||||
from rest_framework.exceptions import APIException
|
||||
from rest_framework_json_api.exceptions import exception_handler
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
|
||||
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
|
||||
|
||||
|
||||
class ModelValidationError(ValidationError):
|
||||
@@ -32,6 +32,36 @@ class InvitationTokenExpiredException(APIException):
|
||||
default_code = "token_expired"
|
||||
|
||||
|
||||
# Task Management Exceptions (non-HTTP)
|
||||
class TaskManagementError(Exception):
|
||||
"""Base exception for task management errors."""
|
||||
|
||||
def __init__(self, task=None):
|
||||
self.task = task
|
||||
super().__init__()
|
||||
|
||||
|
||||
class TaskFailedException(TaskManagementError):
|
||||
"""Raised when a task has failed."""
|
||||
|
||||
|
||||
class TaskNotFoundException(TaskManagementError):
|
||||
"""Raised when a task is not found."""
|
||||
|
||||
|
||||
class TaskInProgressException(TaskManagementError):
|
||||
"""Raised when a task is running but there's no related Task object to return."""
|
||||
|
||||
def __init__(self, task_result=None):
|
||||
self.task_result = task_result
|
||||
super().__init__()
|
||||
|
||||
|
||||
# Provider connection errors
|
||||
class ProviderConnectionError(Exception):
|
||||
"""Base exception for provider connection errors."""
|
||||
|
||||
|
||||
def custom_exception_handler(exc, context):
|
||||
if isinstance(exc, django_validation_error):
|
||||
if hasattr(exc, "error_dict"):
|
||||
@@ -39,7 +69,30 @@ def custom_exception_handler(exc, context):
|
||||
else:
|
||||
exc = ValidationError(detail=exc.messages[0], code=exc.code)
|
||||
elif isinstance(exc, (TokenError, InvalidToken)):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
if (
|
||||
hasattr(exc, "detail")
|
||||
and isinstance(exc.detail, dict)
|
||||
and "messages" in exc.detail
|
||||
):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
return exception_handler(exc, context)
|
||||
|
||||
|
||||
class ConflictException(APIException):
|
||||
status_code = status.HTTP_409_CONFLICT
|
||||
default_detail = "A conflict occurred. The resource already exists."
|
||||
default_code = "conflict"
|
||||
|
||||
def __init__(self, detail=None, code=None, pointer=None):
|
||||
error_detail = {
|
||||
"detail": detail or self.default_detail,
|
||||
"status": self.status_code,
|
||||
"code": self.default_code,
|
||||
}
|
||||
|
||||
if pointer:
|
||||
error_detail["source"] = {"pointer": pointer}
|
||||
|
||||
super().__init__(detail=[error_detail])
|
||||
|
||||
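
A rough sketch of how this exception might be raised; the detail text and pointer are illustrative, and only the constructor arguments come from the class above (the scenario mirrors the changelog entry about `POST /schedules/daily` returning `409 CONFLICT`):

```python
# Inside a view, when the daily schedule already exists for the provider
raise ConflictException(
    detail="Daily scheduled scan already exists for this provider.",
    pointer="/data/attributes/provider_id",
)
```
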
@@ -1,5 +1,6 @@
|
||||
from datetime import date, datetime, timezone
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
|
||||
from dateutil.parser import parse
|
||||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
from django_filters.rest_framework import (
|
||||
@@ -22,11 +23,13 @@ from api.db_utils import (
|
||||
StatusEnumField,
|
||||
)
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
ComplianceRequirementOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
Invitation,
|
||||
Membership,
|
||||
PermissionChoices,
|
||||
Processor,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
ProviderSecret,
|
||||
@@ -80,6 +83,114 @@ class ChoiceInFilter(BaseInFilter, ChoiceFilter):
|
||||
pass
|
||||
|
||||
|
||||
class CommonFindingFilters(FilterSet):
|
||||
# We filter providers from the scan in findings
|
||||
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
|
||||
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
|
||||
provider_uid__icontains = CharFilter(
|
||||
field_name="scan__provider__uid", lookup_expr="icontains"
|
||||
)
|
||||
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
|
||||
provider_alias__in = CharInFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="in"
|
||||
)
|
||||
provider_alias__icontains = CharFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
uid = CharFilter(field_name="uid")
|
||||
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
|
||||
status = ChoiceFilter(choices=StatusChoices.choices)
|
||||
severity = ChoiceFilter(choices=SeverityChoices)
|
||||
impact = ChoiceFilter(choices=SeverityChoices)
|
||||
muted = BooleanFilter(
|
||||
help_text="If this filter is not provided, muted and non-muted findings will be returned."
|
||||
)
|
||||
|
||||
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
|
||||
|
||||
region = CharFilter(method="filter_resource_region")
|
||||
region__in = CharInFilter(field_name="resource_regions", lookup_expr="overlap")
|
||||
region__icontains = CharFilter(
|
||||
field_name="resource_regions", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
service = CharFilter(method="filter_resource_service")
|
||||
service__in = CharInFilter(field_name="resource_services", lookup_expr="overlap")
|
||||
service__icontains = CharFilter(
|
||||
field_name="resource_services", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_uid = CharFilter(field_name="resources__uid")
|
||||
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
|
||||
resource_uid__icontains = CharFilter(
|
||||
field_name="resources__uid", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_name = CharFilter(field_name="resources__name")
|
||||
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
|
||||
resource_name__icontains = CharFilter(
|
||||
field_name="resources__name", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_type = CharFilter(method="filter_resource_type")
|
||||
resource_type__in = CharInFilter(field_name="resource_types", lookup_expr="overlap")
|
||||
resource_type__icontains = CharFilter(
|
||||
field_name="resources__type", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
# Temporarily disabled until we implement tag filtering in the UI
|
||||
# resource_tag_key = CharFilter(field_name="resources__tags__key")
|
||||
# resource_tag_key__in = CharInFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_key__icontains = CharFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tag_value = CharFilter(field_name="resources__tags__value")
|
||||
# resource_tag_value__in = CharInFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_value__icontains = CharFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tags = CharInFilter(
|
||||
# method="filter_resource_tag",
|
||||
# lookup_expr="in",
|
||||
# help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
|
||||
# "separated by commas.",
|
||||
# )
|
||||
|
||||
    def filter_resource_service(self, queryset, name, value):
        return queryset.filter(resource_services__contains=[value])

    def filter_resource_region(self, queryset, name, value):
        return queryset.filter(resource_regions__contains=[value])

    def filter_resource_type(self, queryset, name, value):
        return queryset.filter(resource_types__contains=[value])

    def filter_resource_tag(self, queryset, name, value):
        overall_query = Q()
        for key_value_pair in value:
            tag_key, tag_value = key_value_pair.split(":", 1)
            overall_query |= Q(
                resources__tags__key__icontains=tag_key,
                resources__tags__value__icontains=tag_value,
            )
        return queryset.filter(overall_query).distinct()

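The region/service/type method filters above each wrap the single incoming value in a list so that Postgres array containment is used against the denormalized `resource_*` columns. A rough ORM equivalent is sketched below; the queryset and the example values are assumptions for illustration, not taken from the diff.

# Hedged sketch: what filter[region], filter[service] and filter[resource_type]
# end up doing, given the method filters above. Finding.objects stands in for
# whatever queryset the view passes in; the literal values are invented.
qs = Finding.objects.all()
qs = qs.filter(resource_regions__contains=["eu-west-1"])          # region
qs = qs.filter(resource_services__contains=["accessanalyzer"])    # service
qs = qs.filter(resource_types__contains=["Other"])                # resource_type
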
class TenantFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
@@ -229,6 +340,8 @@ class ResourceFilter(ProviderRelationshipFilterSet):
|
||||
tags = CharFilter(method="filter_tag")
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
scan = UUIDFilter(field_name="provider__scan", lookup_expr="exact")
|
||||
scan__in = UUIDInFilter(field_name="provider__scan", lookup_expr="in")
|
||||
|
||||
class Meta:
|
||||
model = Resource
|
||||
@@ -243,6 +356,82 @@ class ResourceFilter(ProviderRelationshipFilterSet):
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
|
||||
self.data.get("updated_at")
|
||||
or self.data.get("updated_at__date")
|
||||
or self.data.get("updated_at__gte")
|
||||
or self.data.get("updated_at__lte")
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "At least one date filter is required: filter[updated_at], filter[updated_at.gte], "
|
||||
"or filter[updated_at.lte].",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/updated_at"},
|
||||
"code": "required",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
gte_date = (
|
||||
parse(self.data.get("updated_at__gte")).date()
|
||||
if self.data.get("updated_at__gte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
lte_date = (
|
||||
parse(self.data.get("updated_at__lte")).date()
|
||||
if self.data.get("updated_at__lte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
|
||||
if abs(lte_date - gte_date) > timedelta(
|
||||
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/updated_at"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
return super().filter_queryset(queryset)
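Note that when only one bound is supplied the other defaults to today, so the allowed window is anchored to the current date. A small worked example of the range check follows; the `FINDINGS_MAX_DAYS_IN_RANGE = 7` value is assumed for illustration and is not necessarily the project's setting.

# Worked example of the range check above (assumed FINDINGS_MAX_DAYS_IN_RANGE=7).
from datetime import date, timedelta

FINDINGS_MAX_DAYS_IN_RANGE = 7
gte_date = date(2024, 10, 1)   # filter[updated_at.gte]=2024-10-01
lte_date = date(2024, 10, 20)  # filter[updated_at.lte]=2024-10-20

exceeds = abs(lte_date - gte_date) > timedelta(days=FINDINGS_MAX_DAYS_IN_RANGE)
print(exceeds)  # True -> the filter raises the "date range cannot exceed" error
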
|
||||
|
||||
def filter_tag_key(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
|
||||
|
||||
def filter_tag_value(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))
|
||||
|
||||
def filter_tag(self, queryset, name, value):
|
||||
# We won't know what the user wants to filter on just based on the value,
|
||||
# and we don't want to build special filtering logic for every possible
|
||||
# provider tag spec, so we'll just do a full text search
|
||||
return queryset.filter(tags__text_search=value)
|
||||
|
||||
|
||||
class LatestResourceFilter(ProviderRelationshipFilterSet):
|
||||
tag_key = CharFilter(method="filter_tag_key")
|
||||
tag_value = CharFilter(method="filter_tag_value")
|
||||
tag = CharFilter(method="filter_tag")
|
||||
tags = CharFilter(method="filter_tag")
|
||||
|
||||
class Meta:
|
||||
model = Resource
|
||||
fields = {
|
||||
"provider": ["exact", "in"],
|
||||
"uid": ["exact", "icontains"],
|
||||
"name": ["exact", "icontains"],
|
||||
"region": ["exact", "icontains", "in"],
|
||||
"service": ["exact", "icontains", "in"],
|
||||
"type": ["exact", "icontains", "in"],
|
||||
}
|
||||
|
||||
def filter_tag_key(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
|
||||
|
||||
@@ -256,76 +445,20 @@ class ResourceFilter(ProviderRelationshipFilterSet):
|
||||
return queryset.filter(tags__text_search=value)
|
||||
|
||||
|
||||
class FindingFilter(FilterSet):
|
||||
# We filter providers from the scan in findings
|
||||
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
|
||||
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
|
||||
provider_uid__icontains = CharFilter(
|
||||
field_name="scan__provider__uid", lookup_expr="icontains"
|
||||
)
|
||||
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
|
||||
provider_alias__in = CharInFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="in"
|
||||
)
|
||||
provider_alias__icontains = CharFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
uid = CharFilter(field_name="uid")
|
||||
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
|
||||
status = ChoiceFilter(choices=StatusChoices.choices)
|
||||
severity = ChoiceFilter(choices=SeverityChoices)
|
||||
impact = ChoiceFilter(choices=SeverityChoices)
|
||||
|
||||
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
|
||||
|
||||
region = CharFilter(field_name="resources__region")
|
||||
region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
|
||||
region__icontains = CharFilter(
|
||||
field_name="resources__region", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
service = CharFilter(field_name="resources__service")
|
||||
service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
|
||||
service__icontains = CharFilter(
|
||||
field_name="resources__service", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_uid = CharFilter(field_name="resources__uid")
|
||||
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
|
||||
resource_uid__icontains = CharFilter(
|
||||
field_name="resources__uid", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_name = CharFilter(field_name="resources__name")
|
||||
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
|
||||
resource_name__icontains = CharFilter(
|
||||
field_name="resources__name", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_type = CharFilter(field_name="resources__type")
|
||||
resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
|
||||
resource_type__icontains = CharFilter(
|
||||
field_name="resources__type", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
class FindingFilter(CommonFindingFilters):
|
||||
scan = UUIDFilter(method="filter_scan_id")
|
||||
scan__in = UUIDInFilter(method="filter_scan_id_in")
|
||||
|
||||
inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
|
||||
inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
|
||||
inserted_at__gte = DateFilter(method="filter_inserted_at_gte")
|
||||
inserted_at__lte = DateFilter(method="filter_inserted_at_lte")
|
||||
inserted_at__gte = DateFilter(
|
||||
method="filter_inserted_at_gte",
|
||||
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
)
|
||||
inserted_at__lte = DateFilter(
|
||||
method="filter_inserted_at_lte",
|
||||
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Finding
|
||||
@@ -353,6 +486,61 @@ class FindingFilter(FilterSet):
|
||||
},
|
||||
}
|
||||
|
||||
def filter_resource_type(self, queryset, name, value):
|
||||
return queryset.filter(resource_types__contains=[value])
|
||||
|
||||
def filter_resource_region(self, queryset, name, value):
|
||||
return queryset.filter(resource_regions__contains=[value])
|
||||
|
||||
def filter_resource_service(self, queryset, name, value):
|
||||
return queryset.filter(resource_services__contains=[value])
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
|
||||
self.data.get("inserted_at")
|
||||
or self.data.get("inserted_at__date")
|
||||
or self.data.get("inserted_at__gte")
|
||||
or self.data.get("inserted_at__lte")
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "At least one date filter is required: filter[inserted_at], filter[inserted_at.gte], "
|
||||
"or filter[inserted_at.lte].",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/inserted_at"},
|
||||
"code": "required",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
gte_date = (
|
||||
datetime.strptime(self.data.get("inserted_at__gte"), "%Y-%m-%d").date()
|
||||
if self.data.get("inserted_at__gte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
lte_date = (
|
||||
datetime.strptime(self.data.get("inserted_at__lte"), "%Y-%m-%d").date()
|
||||
if self.data.get("inserted_at__lte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
|
||||
if abs(lte_date - gte_date) > timedelta(
|
||||
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/inserted_at"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
# Convert filter values to UUIDv7 values for use with partitioning
|
||||
def filter_scan_id(self, queryset, name, value):
|
||||
try:
|
||||
@@ -373,9 +561,7 @@ class FindingFilter(FilterSet):
|
||||
)
|
||||
|
||||
return (
|
||||
queryset.filter(id__gte=start)
|
||||
.filter(id__lt=end)
|
||||
.filter(scan__id=value_uuid)
|
||||
queryset.filter(id__gte=start).filter(id__lt=end).filter(scan_id=value_uuid)
|
||||
)
|
||||
|
||||
def filter_scan_id_in(self, queryset, name, value):
|
||||
@@ -400,31 +586,32 @@ class FindingFilter(FilterSet):
|
||||
]
|
||||
)
|
||||
if start == end:
|
||||
return queryset.filter(id__gte=start).filter(scan__id__in=uuid_list)
|
||||
return queryset.filter(id__gte=start).filter(scan_id__in=uuid_list)
|
||||
else:
|
||||
return (
|
||||
queryset.filter(id__gte=start)
|
||||
.filter(id__lt=end)
|
||||
.filter(scan__id__in=uuid_list)
|
||||
.filter(scan_id__in=uuid_list)
|
||||
)
|
||||
|
||||
def filter_inserted_at(self, queryset, name, value):
|
||||
value = self.maybe_date_to_datetime(value)
|
||||
start = uuid7_start(datetime_to_uuid7(value))
|
||||
datetime_value = self.maybe_date_to_datetime(value)
|
||||
start = uuid7_start(datetime_to_uuid7(datetime_value))
|
||||
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
|
||||
|
||||
return queryset.filter(id__gte=start).filter(inserted_at__date=value)
|
||||
return queryset.filter(id__gte=start, id__lt=end)
|
||||
|
||||
def filter_inserted_at_gte(self, queryset, name, value):
|
||||
value = self.maybe_date_to_datetime(value)
|
||||
start = uuid7_start(datetime_to_uuid7(value))
|
||||
datetime_value = self.maybe_date_to_datetime(value)
|
||||
start = uuid7_start(datetime_to_uuid7(datetime_value))
|
||||
|
||||
return queryset.filter(id__gte=start).filter(inserted_at__gte=value)
|
||||
return queryset.filter(id__gte=start)
|
||||
|
||||
def filter_inserted_at_lte(self, queryset, name, value):
|
||||
value = self.maybe_date_to_datetime(value)
|
||||
end = uuid7_start(datetime_to_uuid7(value))
|
||||
datetime_value = self.maybe_date_to_datetime(value)
|
||||
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
|
||||
|
||||
return queryset.filter(id__lte=end).filter(inserted_at__lte=value)
|
||||
return queryset.filter(id__lt=end)
|
||||
|
||||
@staticmethod
|
||||
def maybe_date_to_datetime(value):
|
||||
@@ -434,6 +621,31 @@ class FindingFilter(FilterSet):
|
||||
return dt
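These filters rely on UUIDv7 ids starting with a millisecond timestamp, so an `id` range can stand in for an `inserted_at` range on the partitioned findings table. The sketch below only illustrates that idea with a hand-rolled boundary UUID; it is not the `datetime_to_uuid7`/`uuid7_start` implementation used in the codebase.

# Hedged illustration of the UUIDv7 trick behind the filters above: build a
# boundary UUID whose top 48 bits are the unix timestamp in milliseconds and
# whose remaining bits are zero, then compare ids against it.
import uuid
from datetime import datetime, timezone

def boundary_uuid7(dt: datetime) -> uuid.UUID:
    ms = int(dt.timestamp() * 1000)
    # 48-bit unix-ms timestamp, version nibble set to 7, zeroed random bits
    value = (ms << 80) | (0x7 << 76)
    return uuid.UUID(int=value)

start = boundary_uuid7(datetime(2024, 10, 18, tzinfo=timezone.utc))
end = boundary_uuid7(datetime(2024, 10, 19, tzinfo=timezone.utc))
# Findings inserted on 2024-10-18 then satisfy: start <= finding.id < end
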
|
||||
|
||||
|
||||
class LatestFindingFilter(CommonFindingFilters):
|
||||
class Meta:
|
||||
model = Finding
|
||||
fields = {
|
||||
"id": ["exact", "in"],
|
||||
"uid": ["exact", "in"],
|
||||
"delta": ["exact", "in"],
|
||||
"status": ["exact", "in"],
|
||||
"severity": ["exact", "in"],
|
||||
"impact": ["exact", "in"],
|
||||
"check_id": ["exact", "in", "icontains"],
|
||||
}
|
||||
filter_overrides = {
|
||||
FindingDeltaEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
StatusEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
SeverityEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class ProviderSecretFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
@@ -505,12 +717,11 @@ class RoleFilter(FilterSet):
|
||||
|
||||
class ComplianceOverviewFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
|
||||
provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
|
||||
scan_id = UUIDFilter(field_name="scan__id")
|
||||
scan_id = UUIDFilter(field_name="scan_id")
|
||||
region = CharFilter(field_name="region")
|
||||
|
||||
class Meta:
|
||||
model = ComplianceOverview
|
||||
model = ComplianceRequirementOverview
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"compliance_id": ["exact", "icontains"],
|
||||
@@ -530,12 +741,6 @@ class ScanSummaryFilter(FilterSet):
|
||||
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
region = CharFilter(field_name="region")
|
||||
muted_findings = BooleanFilter(method="filter_muted_findings")
|
||||
|
||||
def filter_muted_findings(self, queryset, name, value):
|
||||
if not value:
|
||||
return queryset.exclude(muted__gt=0)
|
||||
return queryset
|
||||
|
||||
class Meta:
|
||||
model = ScanSummary
|
||||
@@ -546,8 +751,6 @@ class ScanSummaryFilter(FilterSet):
|
||||
|
||||
|
||||
class ServiceOverviewFilter(ScanSummaryFilter):
|
||||
muted_findings = None
|
||||
|
||||
def is_valid(self):
|
||||
# Check if at least one of the inserted_at filters is present
|
||||
inserted_at_filters = [
|
||||
@@ -565,3 +768,28 @@ class ServiceOverviewFilter(ScanSummaryFilter):
|
||||
}
|
||||
)
|
||||
return super().is_valid()
|
||||
|
||||
|
||||
class IntegrationFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
|
||||
integration_type__in = ChoiceInFilter(
|
||||
choices=Integration.IntegrationChoices.choices,
|
||||
field_name="integration_type",
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
}
|
||||
|
||||
|
||||
class ProcessorFilter(FilterSet):
|
||||
processor_type = ChoiceFilter(choices=Processor.ProcessorChoices.choices)
|
||||
processor_type__in = ChoiceInFilter(
|
||||
choices=Processor.ProcessorChoices.choices,
|
||||
field_name="processor_type",
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"model": "api.user",
|
||||
"pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "Devie Prowlerson",
|
||||
"email": "dev@prowler.com",
|
||||
@@ -16,7 +16,7 @@
|
||||
"model": "api.user",
|
||||
"pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "Devietoo Prowlerson",
|
||||
"email": "dev2@prowler.com",
|
||||
|
||||
@@ -122,6 +122,22 @@
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:45:26.352Z",
|
||||
"updated_at": "2024-10-18T11:16:23.533Z",
|
||||
"provider": "kubernetes",
|
||||
"uid": "gke_lucky-coast-419309_us-central1_autopilot-cluster-2",
|
||||
"alias": "k8s_testing_2",
|
||||
"connected": true,
|
||||
"connection_last_checked_at": "2024-10-18T11:16:23.503Z",
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",
|
||||
|
||||
@@ -11,9 +11,7 @@
|
||||
"unique_resource_count": 1,
|
||||
"duration": 5,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"inserted_at": "2024-09-01T17:25:27.050Z",
|
||||
"started_at": "2024-09-01T17:25:27.050Z",
|
||||
@@ -33,9 +31,7 @@
|
||||
"unique_resource_count": 1,
|
||||
"duration": 20,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"inserted_at": "2024-09-02T17:24:27.050Z",
|
||||
"started_at": "2024-09-02T17:24:27.050Z",
|
||||
@@ -55,9 +51,7 @@
|
||||
"unique_resource_count": 10,
|
||||
"duration": 10,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"cloudsql_instance_automated_backups"
|
||||
]
|
||||
"checks_to_execute": ["cloudsql_instance_automated_backups"]
|
||||
},
|
||||
"inserted_at": "2024-09-02T19:26:27.050Z",
|
||||
"started_at": "2024-09-02T19:26:27.050Z",
|
||||
@@ -77,9 +71,7 @@
|
||||
"unique_resource_count": 1,
|
||||
"duration": 35,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"inserted_at": "2024-09-02T19:27:27.050Z",
|
||||
"started_at": "2024-09-02T19:27:27.050Z",
|
||||
@@ -97,9 +89,7 @@
|
||||
"name": "test scheduled aws scan",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"cloudformation_stack_outputs_find_secrets"
|
||||
]
|
||||
"checks_to_execute": ["cloudformation_stack_outputs_find_secrets"]
|
||||
},
|
||||
"scheduled_at": "2030-09-02T19:20:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:24:27.050Z",
|
||||
@@ -178,9 +168,7 @@
|
||||
"unique_resource_count": 19,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"duration": 7,
|
||||
"scheduled_at": null,
|
||||
@@ -190,6 +178,56 @@
|
||||
"completed_at": "2024-10-18T10:46:05.127Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "6dd8925f-a52d-48de-a546-d2d90db30ab1",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan azure",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 20,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
"duration": 4,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T11:16:21.358Z",
|
||||
"updated_at": "2024-10-18T11:16:26.060Z",
|
||||
"started_at": "2024-10-18T11:16:21.593Z",
|
||||
"completed_at": "2024-10-18T11:16:26.060Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "4ca7ce89-3236-41a8-a369-8937bc152af5",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan k8s",
|
||||
"provider": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 20,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
"duration": 4,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T11:16:21.358Z",
|
||||
"updated_at": "2024-10-18T11:16:26.060Z",
|
||||
"started_at": "2024-10-18T11:16:21.593Z",
|
||||
"completed_at": "2024-10-18T11:16:26.060Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.823Z",
|
||||
"updated_at": "2024-10-18T10:46:04.841Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.823Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -61,6 +62,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.855Z",
|
||||
"updated_at": "2024-10-18T10:46:04.858Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.855Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -116,6 +118,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.869Z",
|
||||
"updated_at": "2024-10-18T10:46:04.876Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.869Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -171,6 +174,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.888Z",
|
||||
"updated_at": "2024-10-18T10:46:04.892Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.888Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -226,6 +230,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.901Z",
|
||||
"updated_at": "2024-10-18T10:46:04.905Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.901Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -281,6 +286,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.915Z",
|
||||
"updated_at": "2024-10-18T10:46:04.919Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.915Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -336,6 +342,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.929Z",
|
||||
"updated_at": "2024-10-18T10:46:04.934Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.929Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -391,6 +398,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.944Z",
|
||||
"updated_at": "2024-10-18T10:46:04.947Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.944Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -446,6 +454,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.957Z",
|
||||
"updated_at": "2024-10-18T10:46:04.962Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.957Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
|
||||
"delta": "new",
|
||||
"status": "PASS",
|
||||
@@ -501,6 +510,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.971Z",
|
||||
"updated_at": "2024-10-18T10:46:04.975Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.971Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -556,6 +566,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.984Z",
|
||||
"updated_at": "2024-10-18T10:46:04.989Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.984Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -611,6 +622,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.999Z",
|
||||
"updated_at": "2024-10-18T10:46:05.003Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.999Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -666,6 +678,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.013Z",
|
||||
"updated_at": "2024-10-18T10:46:05.018Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.013Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -721,6 +734,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.029Z",
|
||||
"updated_at": "2024-10-18T10:46:05.033Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.029Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -776,6 +790,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.045Z",
|
||||
"updated_at": "2024-10-18T10:46:05.050Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.045Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -831,6 +846,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.061Z",
|
||||
"updated_at": "2024-10-18T10:46:05.065Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.061Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -886,6 +902,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.080Z",
|
||||
"updated_at": "2024-10-18T10:46:05.085Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.080Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -941,6 +958,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.099Z",
|
||||
"updated_at": "2024-10-18T10:46:05.104Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.099Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -996,6 +1014,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.115Z",
|
||||
"updated_at": "2024-10-18T10:46:05.121Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.115Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
|
||||
"delta": "new",
|
||||
"status": "FAIL",
|
||||
@@ -1051,6 +1070,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.489Z",
|
||||
"updated_at": "2024-10-18T11:16:24.506Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.823Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1106,6 +1126,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.518Z",
|
||||
"updated_at": "2024-10-18T11:16:24.521Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.855Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1161,6 +1182,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.526Z",
|
||||
"updated_at": "2024-10-18T11:16:24.529Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.869Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1216,6 +1238,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.535Z",
|
||||
"updated_at": "2024-10-18T11:16:24.538Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.888Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1271,6 +1294,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.544Z",
|
||||
"updated_at": "2024-10-18T11:16:24.546Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.901Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1326,6 +1350,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.551Z",
|
||||
"updated_at": "2024-10-18T11:16:24.554Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.915Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1381,6 +1406,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.560Z",
|
||||
"updated_at": "2024-10-18T11:16:24.562Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.929Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1436,6 +1462,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.567Z",
|
||||
"updated_at": "2024-10-18T11:16:24.569Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.944Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1491,6 +1518,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.573Z",
|
||||
"updated_at": "2024-10-18T11:16:24.575Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.957Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
|
||||
"delta": null,
|
||||
"status": "PASS",
|
||||
@@ -1546,6 +1574,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.580Z",
|
||||
"updated_at": "2024-10-18T11:16:24.582Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.971Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1601,6 +1630,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.587Z",
|
||||
"updated_at": "2024-10-18T11:16:24.589Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.984Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1656,6 +1686,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.595Z",
|
||||
"updated_at": "2024-10-18T11:16:24.597Z",
|
||||
"first_seen_at": "2024-10-18T10:46:04.999Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1711,6 +1742,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.602Z",
|
||||
"updated_at": "2024-10-18T11:16:24.604Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.013Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1766,6 +1798,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.610Z",
|
||||
"updated_at": "2024-10-18T11:16:24.612Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.029Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1821,6 +1854,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.617Z",
|
||||
"updated_at": "2024-10-18T11:16:24.620Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.045Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1876,6 +1910,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.625Z",
|
||||
"updated_at": "2024-10-18T11:16:24.627Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.061Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1931,6 +1966,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.632Z",
|
||||
"updated_at": "2024-10-18T11:16:24.634Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.080Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -1986,6 +2022,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.639Z",
|
||||
"updated_at": "2024-10-18T11:16:24.642Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.099Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -2041,6 +2078,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:24.646Z",
|
||||
"updated_at": "2024-10-18T11:16:24.648Z",
|
||||
"first_seen_at": "2024-10-18T10:46:05.115Z",
|
||||
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
|
||||
"delta": null,
|
||||
"status": "FAIL",
|
||||
@@ -2096,6 +2134,7 @@
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:26.033Z",
|
||||
"updated_at": "2024-10-18T11:16:26.045Z",
|
||||
"first_seen_at": "2024-10-18T11:16:26.033Z",
|
||||
"uid": "prowler-aws-account_security_contact_information_is_registered-112233445566-us-east-1-112233445566",
|
||||
"delta": "new",
|
||||
"status": "MANUAL",
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
from django.contrib.sites.models import Site
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
|
||||
|
||||
def table_exists(table_name):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.tables
|
||||
WHERE table_name = %s
|
||||
)
|
||||
""",
|
||||
[table_name],
|
||||
)
|
||||
return cursor.fetchone()[0]
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Fix migration inconsistency between socialaccount and sites"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--database",
|
||||
default=DEFAULT_DB_ALIAS,
|
||||
help="Specifies the database to operate on.",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
db = options["database"]
|
||||
connection = connections[db]
|
||||
recorder = MigrationRecorder(connection)
|
||||
|
||||
applied = set(recorder.applied_migrations())
|
||||
|
||||
has_social = ("socialaccount", "0001_initial") in applied
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_name = 'django_site'
|
||||
);
|
||||
"""
|
||||
)
|
||||
site_table_exists = cursor.fetchone()[0]
|
||||
|
||||
if has_social and not site_table_exists:
|
||||
self.stdout.write(
|
||||
f"Detected inconsistency in '{db}'. Creating 'django_site' table manually..."
|
||||
)
|
||||
|
||||
with transaction.atomic(using=db):
|
||||
with connection.schema_editor() as schema_editor:
|
||||
schema_editor.create_model(Site)
|
||||
|
||||
recorder.record_applied("sites", "0001_initial")
|
||||
recorder.record_applied("sites", "0002_alter_domain_unique")
|
||||
|
||||
self.stdout.write(
|
||||
"Fixed: 'django_site' table created and migrations registered."
|
||||
)
|
||||
|
||||
# Ensure the relationship table also exists
|
||||
if not table_exists("socialaccount_socialapp_sites"):
|
||||
self.stdout.write(
|
||||
"Detected missing 'socialaccount_socialapp_sites' table. Creating manually..."
|
||||
)
|
||||
with connection.schema_editor() as schema_editor:
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
schema_editor.create_model(
|
||||
SocialApp._meta.get_field("sites").remote_field.through
|
||||
)
|
||||
self.stdout.write(
|
||||
"Fixed: 'socialaccount_socialapp_sites' table created."
|
||||
)
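Since this is a management command, it would be run through Django's command machinery; the command name below is assumed from the module's file name, which this hunk does not show, so it may differ.

# Hedged invocation sketch; "fix_socialaccount_sites" is an assumed command name.
from django.core.management import call_command

call_command("fix_socialaccount_sites", database="default")
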
|
||||
api/src/backend/api/management/commands/findings.py (new file, 285 lines)
@@ -0,0 +1,285 @@
|
||||
import random
|
||||
from datetime import datetime, timezone
|
||||
from math import ceil
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from tqdm import tqdm
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
Finding,
|
||||
Provider,
|
||||
Resource,
|
||||
ResourceFindingMapping,
|
||||
ResourceScanSummary,
|
||||
Scan,
|
||||
StatusChoices,
|
||||
)
|
||||
from prowler.lib.check.models import CheckMetadata
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Populates the database with test data for performance testing."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--tenant",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Tenant id for which the data will be populated.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--resources",
|
||||
type=int,
|
||||
required=True,
|
||||
help="The number of resources to create.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--findings",
|
||||
type=int,
|
||||
required=True,
|
||||
help="The number of findings to create.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--batch", type=int, required=True, help="The batch size for bulk creation."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--alias",
|
||||
type=str,
|
||||
required=False,
|
||||
help="Optional alias for the provider and scan",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
tenant_id = options["tenant"]
|
||||
num_resources = options["resources"]
|
||||
num_findings = options["findings"]
|
||||
batch_size = options["batch"]
|
||||
alias = options["alias"] or "Testing"
|
||||
uid_token = str(uuid4())
|
||||
|
||||
self.stdout.write(self.style.NOTICE("Starting data population"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tTenant: {tenant_id}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tAlias: {alias}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tResources: {num_resources}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tFindings: {num_findings}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tBatch size: {batch_size}\n\n"))
|
||||
|
||||
# Resource metadata
|
||||
possible_regions = [
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-1",
|
||||
"us-west-2",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-south-1",
|
||||
"sa-east-1",
|
||||
]
|
||||
possible_services = []
|
||||
possible_types = []
|
||||
|
||||
bulk_check_metadata = CheckMetadata.get_bulk(provider="aws")
|
||||
for check_metadata in bulk_check_metadata.values():
|
||||
if check_metadata.ServiceName not in possible_services:
|
||||
possible_services.append(check_metadata.ServiceName)
|
||||
if (
|
||||
check_metadata.ResourceType
|
||||
and check_metadata.ResourceType not in possible_types
|
||||
):
|
||||
possible_types.append(check_metadata.ResourceType)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
provider, _ = Provider.all_objects.get_or_create(
|
||||
tenant_id=tenant_id,
|
||||
provider="aws",
|
||||
connected=True,
|
||||
uid=str(random.randint(100000000000, 999999999999)),
|
||||
defaults={
|
||||
"alias": alias,
|
||||
},
|
||||
)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan = Scan.all_objects.create(
|
||||
tenant_id=tenant_id,
|
||||
provider=provider,
|
||||
name=alias,
|
||||
trigger="manual",
|
||||
state="executing",
|
||||
progress=0,
|
||||
started_at=datetime.now(timezone.utc),
|
||||
)
|
||||
scan_state = "completed"
|
||||
|
||||
try:
|
||||
# Create resources
|
||||
resources = []
|
||||
|
||||
for i in range(num_resources):
|
||||
resources.append(
|
||||
Resource(
|
||||
tenant_id=tenant_id,
|
||||
provider_id=provider.id,
|
||||
uid=f"testing-{uid_token}-{i}",
|
||||
name=f"Testing {uid_token}-{i}",
|
||||
region=random.choice(possible_regions),
|
||||
service=random.choice(possible_services),
|
||||
type=random.choice(possible_types),
|
||||
inserted_at="2024-10-01T00:00:00Z",
|
||||
)
|
||||
)
|
||||
|
||||
num_batches = ceil(len(resources) / batch_size)
|
||||
self.stdout.write(self.style.WARNING("Creating resources..."))
|
||||
for i in tqdm(range(0, len(resources), batch_size), total=num_batches):
|
||||
with rls_transaction(tenant_id):
|
||||
Resource.all_objects.bulk_create(resources[i : i + batch_size])
|
||||
self.stdout.write(self.style.SUCCESS("Resources created successfully.\n\n"))
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan.progress = 33
|
||||
scan.save()
|
||||
|
||||
# Create Findings
|
||||
findings = []
|
||||
possible_deltas = ["new", "changed", None]
|
||||
possible_severities = ["critical", "high", "medium", "low"]
|
||||
findings_resources_mapping = []
|
||||
|
||||
for i in range(num_findings):
|
||||
severity = random.choice(possible_severities)
|
||||
check_id = random.randint(1, 1000)
|
||||
assigned_resource_num = random.randint(0, len(resources) - 1)
|
||||
assigned_resource = resources[assigned_resource_num]
|
||||
findings_resources_mapping.append(assigned_resource_num)
|
||||
|
||||
findings.append(
|
||||
Finding(
|
||||
tenant_id=tenant_id,
|
||||
scan=scan,
|
||||
uid=f"testing-{uid_token}-{i}",
|
||||
delta=random.choice(possible_deltas),
|
||||
check_id=f"check-{check_id}",
|
||||
status=random.choice(list(StatusChoices)),
|
||||
severity=severity,
|
||||
impact=severity,
|
||||
raw_result={},
|
||||
check_metadata={
|
||||
"checktitle": f"Test title for check {check_id}",
|
||||
"risk": f"Testing risk {uid_token}-{i}",
|
||||
"provider": "aws",
|
||||
"severity": severity,
|
||||
"categories": ["category1", "category2", "category3"],
|
||||
"description": "This is a random description that should not matter for testing purposes.",
|
||||
"servicename": assigned_resource.service,
|
||||
"resourcetype": assigned_resource.type,
|
||||
},
|
||||
resource_types=[assigned_resource.type],
|
||||
resource_regions=[assigned_resource.region],
|
||||
resource_services=[assigned_resource.service],
|
||||
inserted_at="2024-10-01T00:00:00Z",
|
||||
)
|
||||
)
|
||||
|
||||
num_batches = ceil(len(findings) / batch_size)
|
||||
self.stdout.write(self.style.WARNING("Creating findings..."))
|
||||
for i in tqdm(range(0, len(findings), batch_size), total=num_batches):
|
||||
with rls_transaction(tenant_id):
|
||||
Finding.all_objects.bulk_create(findings[i : i + batch_size])
|
||||
self.stdout.write(self.style.SUCCESS("Findings created successfully.\n\n"))
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan.progress = 66
|
||||
scan.save()
|
||||
|
||||
# Create ResourceFindingMapping
|
||||
mappings = []
|
||||
scan_resource_cache: set[tuple] = set()
|
||||
for index, finding_instance in enumerate(findings):
|
||||
resource_instance = resources[findings_resources_mapping[index]]
|
||||
mappings.append(
|
||||
ResourceFindingMapping(
|
||||
tenant_id=tenant_id,
|
||||
resource=resource_instance,
|
||||
finding=finding_instance,
|
||||
)
|
||||
)
|
||||
scan_resource_cache.add(
|
||||
(
|
||||
str(resource_instance.id),
|
||||
resource_instance.service,
|
||||
resource_instance.region,
|
||||
resource_instance.type,
|
||||
)
|
||||
)
|
||||
|
||||
num_batches = ceil(len(mappings) / batch_size)
|
||||
self.stdout.write(
|
||||
self.style.WARNING("Creating resource-finding mappings...")
|
||||
)
|
||||
for i in tqdm(range(0, len(mappings), batch_size), total=num_batches):
|
||||
with rls_transaction(tenant_id):
|
||||
ResourceFindingMapping.objects.bulk_create(
|
||||
mappings[i : i + batch_size]
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"Resource-finding mappings created successfully.\n\n"
|
||||
)
|
||||
)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan.progress = 99
|
||||
scan.save()
|
||||
|
||||
self.stdout.write(self.style.WARNING("Creating finding filter values..."))
|
||||
resource_scan_summaries = [
|
||||
ResourceScanSummary(
|
||||
tenant_id=tenant_id,
|
||||
scan_id=str(scan.id),
|
||||
resource_id=resource_id,
|
||||
service=service,
|
||||
region=region,
|
||||
resource_type=resource_type,
|
||||
)
|
||||
for resource_id, service, region, resource_type in scan_resource_cache
|
||||
]
|
||||
num_batches = ceil(len(resource_scan_summaries) / batch_size)
|
||||
with rls_transaction(tenant_id):
|
||||
for i in tqdm(
|
||||
range(0, len(resource_scan_summaries), batch_size),
|
||||
total=num_batches,
|
||||
):
|
||||
with rls_transaction(tenant_id):
|
||||
ResourceScanSummary.objects.bulk_create(
|
||||
resource_scan_summaries[i : i + batch_size],
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Finding filter values created successfully.\n\n")
|
||||
)
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
|
||||
scan_state = "failed"
|
||||
finally:
|
||||
scan.completed_at = datetime.now(timezone.utc)
|
||||
scan.duration = int(
|
||||
(datetime.now(timezone.utc) - scan.started_at).total_seconds()
|
||||
)
|
||||
scan.progress = 100
|
||||
scan.state = scan_state
|
||||
scan.unique_resource_count = num_resources
|
||||
with rls_transaction(tenant_id):
|
||||
scan.save()
|
||||
|
||||
self.stdout.write(self.style.NOTICE("Successfully populated test data."))
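Given the file path above (`api/src/backend/api/management/commands/findings.py`), the command is exposed as `findings`. A typical load-testing invocation might look like the following; the tenant UUID is the one used in the test fixtures and the counts are arbitrary examples.

# Example invocation of the data-population command defined above.
# --alias is optional and defaults to "Testing".
from django.core.management import call_command

call_command(
    "findings",
    tenant="12646005-9067-4d2a-a098-8bb378604362",  # example tenant id from the fixtures
    resources=1_000,
    findings=50_000,
    batch=5_000,
)
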
|
||||
api/src/backend/api/migrations/0006_findings_first_seen.py (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0005_rbac_missing_admin_roles"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="first_seen_at",
|
||||
field=models.DateTimeField(editable=False, null=True),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,25 @@
|
||||
# Generated by Django 5.1.5 on 2025-01-28 15:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0006_findings_first_seen"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="scan",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "provider_id", "state", "inserted_at"],
|
||||
name="scans_prov_state_insert_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="scansummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id"], name="scan_summaries_tenant_scan_idx"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,64 @@
|
||||
import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import Scan, StateChoices
|
||||
|
||||
|
||||
def migrate_daily_scheduled_scan_tasks(apps, schema_editor):
|
||||
for daily_scheduled_scan_task in PeriodicTask.objects.filter(
|
||||
task="scan-perform-scheduled"
|
||||
):
|
||||
task_kwargs = json.loads(daily_scheduled_scan_task.kwargs)
|
||||
tenant_id = task_kwargs["tenant_id"]
|
||||
provider_id = task_kwargs["provider_id"]
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
scheduled_time_today = datetime.combine(
|
||||
current_time.date(),
|
||||
daily_scheduled_scan_task.start_time.time(),
|
||||
tzinfo=timezone.utc,
|
||||
)
|
||||
|
||||
if current_time < scheduled_time_today:
|
||||
next_scan_date = scheduled_time_today
|
||||
else:
|
||||
next_scan_date = scheduled_time_today + timedelta(days=1)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
Scan.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
name="Daily scheduled scan",
|
||||
provider_id=provider_id,
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduled_at=next_scan_date,
|
||||
scheduler_task_id=daily_scheduled_scan_task.id,
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0007_scan_and_scan_summaries_indexes"),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="scan",
|
||||
name="scheduler_task",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="django_celery_beat.periodictask",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(migrate_daily_scheduled_scan_tasks),
|
||||
]
|
||||
@@ -0,0 +1,22 @@
|
||||
# Generated by Django 5.1.5 on 2025-02-07 09:42
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0008_daily_scheduled_tasks_update"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="uid",
|
||||
field=models.CharField(
|
||||
max_length=250,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
verbose_name="Unique identifier for the provider, set by the provider",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,109 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import connection, migrations
|
||||
|
||||
|
||||
def create_index_on_partitions(
|
||||
apps, schema_editor, parent_table: str, index_name: str, index_details: str
|
||||
):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass;
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
# Iterate over partitions and create index concurrently.
|
||||
# Note: PostgreSQL does not allow CONCURRENTLY inside a transaction,
|
||||
# so we need atomic = False for this migration.
|
||||
for partition in partitions:
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} ON {partition} "
|
||||
f"{index_details};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def drop_index_on_partitions(apps, schema_editor, parent_table: str, index_name: str):
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass;
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
# Iterate over partitions and drop index concurrently.
|
||||
for partition in partitions:
|
||||
partition_index = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {partition_index};"
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0009_increase_provider_uid_maximum_length"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_tenant_and_id_idx",
|
||||
index_details="(tenant_id, id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_tenant_and_id_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_idx",
|
||||
index_details="(tenant_id, scan_id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_id_idx",
|
||||
index_details="(tenant_id, scan_id, id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_id_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_delta_new_idx",
|
||||
index_details="(tenant_id, id) where delta = 'new'",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_delta_new_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,49 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0010_findings_performance_indexes_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id", "id"], name="find_tenant_scan_id_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
condition=models.Q(("delta", "new")),
|
||||
fields=["tenant_id", "id"],
|
||||
name="find_delta_new_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="resourcetagmapping",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "resource_id"], name="resource_tag_tenant_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="resource",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "service", "region", "type"],
|
||||
name="resource_tenant_metadata_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
15 api/src/backend/api/migrations/0012_scan_report_output.py Normal file
@@ -0,0 +1,15 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0011_findings_performance_indexes_parent"),
    ]

    operations = [
        migrations.AddField(
            model_name="scan",
            name="output_location",
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
    ]
35 api/src/backend/api/migrations/0013_integrations_enum.py Normal file
@@ -0,0 +1,35 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
|
||||
from api.models import Integration
|
||||
|
||||
IntegrationTypeEnumMigration = PostgresEnumMigration(
|
||||
enum_name="integration_type",
|
||||
enum_values=tuple(
|
||||
integration_type[0]
|
||||
for integration_type in Integration.IntegrationChoices.choices
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0012_scan_report_output"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
IntegrationTypeEnumMigration.create_enum_type,
|
||||
reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(register_enum, enum_class=IntegrationTypeEnum),
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
131 api/src/backend/api/migrations/0014_integrations.py Normal file
@@ -0,0 +1,131 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0013_integrations_enum"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Integration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("enabled", models.BooleanField(default=False)),
|
||||
("connected", models.BooleanField(blank=True, null=True)),
|
||||
(
|
||||
"connection_last_checked_at",
|
||||
models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
(
|
||||
"integration_type",
|
||||
api.db_utils.IntegrationTypeEnumField(
|
||||
choices=[
|
||||
("amazon_s3", "Amazon S3"),
|
||||
("saml", "SAML"),
|
||||
("aws_security_hub", "AWS Security Hub"),
|
||||
("jira", "JIRA"),
|
||||
("slack", "Slack"),
|
||||
]
|
||||
),
|
||||
),
|
||||
("configuration", models.JSONField(default=dict)),
|
||||
("_credentials", models.BinaryField(db_column="credentials")),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={"db_table": "integrations", "abstract": False},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="integration",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="IntegrationProviderRelationship",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"integration",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="api.integration",
|
||||
),
|
||||
),
|
||||
(
|
||||
"provider",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.provider"
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "integration_provider_mappings",
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("integration_id", "provider_id"),
|
||||
name="unique_integration_provider_rel",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="IntegrationProviderRelationship",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integrationproviderrelationship",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="integration",
|
||||
name="providers",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="integrations",
|
||||
through="api.IntegrationProviderRelationship",
|
||||
to="api.provider",
|
||||
),
|
||||
),
|
||||
]
|
||||
26 api/src/backend/api/migrations/0015_finding_muted.py Normal file
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-25 11:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0014_integrations"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="muted",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="finding",
|
||||
name="status",
|
||||
field=api.db_utils.StatusEnumField(
|
||||
choices=[("FAIL", "Fail"), ("PASS", "Pass"), ("MANUAL", "Manual")]
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,32 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-31 10:46
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0015_finding_muted"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="compliance",
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="details",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="metadata",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="partition",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
32 api/src/backend/api/migrations/0017_m365_provider.py Normal file
@@ -0,0 +1,32 @@
|
||||
# Generated by Django 5.1.7 on 2025-04-16 08:47
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0016_finding_compliance_resource_details_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="provider",
|
||||
field=api.db_utils.ProviderEnumField(
|
||||
choices=[
|
||||
("aws", "AWS"),
|
||||
("azure", "Azure"),
|
||||
("gcp", "GCP"),
|
||||
("kubernetes", "Kubernetes"),
|
||||
("m365", "M365"),
|
||||
],
|
||||
default="aws",
|
||||
),
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'm365';",
|
||||
reverse_sql=migrations.RunSQL.noop,
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,81 @@
|
||||
# Generated by Django 5.1.7 on 2025-05-05 10:01
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
import uuid6
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0017_m365_provider"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ResourceScanSummary",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("scan_id", models.UUIDField(db_index=True, default=uuid6.uuid7)),
|
||||
("resource_id", models.UUIDField(db_index=True, default=uuid.uuid4)),
|
||||
("service", models.CharField(max_length=100)),
|
||||
("region", models.CharField(max_length=100)),
|
||||
("resource_type", models.CharField(max_length=100)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "resource_scan_summaries",
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "service"],
|
||||
name="rss_tenant_scan_svc_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region"],
|
||||
name="rss_tenant_scan_reg_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "resource_type"],
|
||||
name="rss_tenant_scan_type_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region", "service"],
|
||||
name="rss_tenant_scan_reg_svc_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "service", "resource_type"],
|
||||
name="rss_tenant_scan_svc_type_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "region", "resource_type"],
|
||||
name="rss_tenant_scan_reg_type_idx",
|
||||
),
|
||||
],
|
||||
"unique_together": {("tenant_id", "scan_id", "resource_id")},
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="resourcescansummary",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_resourcescansummary",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,42 @@
|
||||
import django.contrib.postgres.fields
|
||||
import django.contrib.postgres.indexes
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0018_resource_scan_summaries"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="resource_regions",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="resource_services",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="resource_types",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,86 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0019_finding_denormalize_resource_fields"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_service_idx",
|
||||
columns="resource_services",
|
||||
method="GIN",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_service_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_region_idx",
|
||||
columns="resource_regions",
|
||||
method="GIN",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_region_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_rtype_idx",
|
||||
columns="resource_types",
|
||||
method="GIN",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="gin_find_rtype_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_uid_idx",
|
||||
),
|
||||
reverse_code=partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_uid_idx",
|
||||
columns="uid",
|
||||
method="BTREE",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_filter_idx",
|
||||
),
|
||||
reverse_code=partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_filter_idx",
|
||||
columns="scan_id, impact, severity, status, check_id, delta",
|
||||
method="BTREE",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,37 @@
|
||||
import django.contrib.postgres.indexes
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0020_findings_new_performance_indexes_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
fields=["resource_services"], name="gin_find_service_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
fields=["resource_regions"], name="gin_find_region_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=django.contrib.postgres.indexes.GinIndex(
|
||||
fields=["resource_types"], name="gin_find_rtype_idx"
|
||||
),
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="finding",
|
||||
name="findings_uid_idx",
|
||||
),
|
||||
migrations.RemoveIndex(
|
||||
model_name="finding",
|
||||
name="findings_filter_idx",
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,38 @@
|
||||
# Generated by Django 5.1.8 on 2025-05-12 10:04
|
||||
|
||||
from django.contrib.postgres.operations import AddIndexConcurrently
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0021_findings_new_performance_indexes_parent"),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
AddIndexConcurrently(
|
||||
model_name="scan",
|
||||
index=models.Index(
|
||||
condition=models.Q(("state", "completed")),
|
||||
fields=["tenant_id", "provider_id", "state", "-inserted_at"],
|
||||
name="scans_prov_state_ins_desc_idx",
|
||||
),
|
||||
),
|
||||
AddIndexConcurrently(
|
||||
model_name="scansummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id", "service"],
|
||||
name="ss_tenant_scan_service_idx",
|
||||
),
|
||||
),
|
||||
AddIndexConcurrently(
|
||||
model_name="scansummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id", "severity"],
|
||||
name="ss_tenant_scan_severity_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 5.1.8 on 2025-05-12 10:18
|
||||
|
||||
from django.contrib.postgres.operations import AddIndexConcurrently
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0022_scan_summaries_performance_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
AddIndexConcurrently(
|
||||
model_name="resource",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "id"], name="resources_tenant_id_idx"
|
||||
),
|
||||
),
|
||||
AddIndexConcurrently(
|
||||
model_name="resource",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "provider_id"],
|
||||
name="resources_tenant_provider_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,29 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0023_resources_lookup_optimization"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_uid_inserted_idx",
|
||||
columns="tenant_id, uid, inserted_at DESC",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_uid_inserted_idx",
|
||||
),
|
||||
)
|
||||
]
|
||||
@@ -0,0 +1,17 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0024_findings_uid_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "uid", "-inserted_at"],
                name="find_tenant_uid_inserted_idx",
            ),
        ),
    ]
@@ -0,0 +1,14 @@
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0025_findings_uid_index_parent"),
    ]

    operations = [
        migrations.RunSQL(
            "ALTER TYPE provider_secret_type ADD VALUE IF NOT EXISTS 'service_account';",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
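The RunSQL above widens an existing PostgreSQL enum; the reverse is a no-op because PostgreSQL has no ALTER TYPE ... DROP VALUE. A hedged sketch of how the new label could be verified from a Django shell (the enum name comes from the migration; the check itself is illustrative):

from django.db import connection

with connection.cursor() as cursor:
    # enum_range() returns every label of the enum, in declaration order.
    cursor.execute("SELECT unnest(enum_range(NULL::provider_secret_type))::text;")
    labels = [row[0] for row in cursor.fetchall()]

assert "service_account" in labels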
@@ -0,0 +1,124 @@
|
||||
# Generated by Django 5.1.8 on 2025-05-21 11:37
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0026_provider_secret_gcp_service_account"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ComplianceRequirementOverview",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("compliance_id", models.TextField(blank=False)),
|
||||
("framework", models.TextField(blank=False)),
|
||||
("version", models.TextField(blank=True)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("region", models.TextField(blank=False)),
|
||||
("requirement_id", models.TextField(blank=False)),
|
||||
(
|
||||
"requirement_status",
|
||||
api.db_utils.StatusEnumField(
|
||||
choices=[
|
||||
("FAIL", "Fail"),
|
||||
("PASS", "Pass"),
|
||||
("MANUAL", "Manual"),
|
||||
]
|
||||
),
|
||||
),
|
||||
("passed_checks", models.IntegerField(default=0)),
|
||||
("failed_checks", models.IntegerField(default=0)),
|
||||
("total_checks", models.IntegerField(default=0)),
|
||||
(
|
||||
"scan",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="compliance_requirements_overviews",
|
||||
related_query_name="compliance_requirements_overview",
|
||||
to="api.scan",
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "compliance_requirements_overviews",
|
||||
"abstract": False,
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "compliance_id"],
|
||||
name="cro_scan_comp_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "scan_id", "compliance_id", "region"],
|
||||
name="cro_scan_comp_reg_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=[
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
],
|
||||
name="cro_scan_comp_req_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=[
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
"region",
|
||||
],
|
||||
name="cro_scan_comp_req_reg_idx",
|
||||
),
|
||||
],
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=(
|
||||
"tenant_id",
|
||||
"scan_id",
|
||||
"compliance_id",
|
||||
"requirement_id",
|
||||
"region",
|
||||
),
|
||||
name="unique_tenant_compliance_requirement_overview",
|
||||
)
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="ComplianceRequirementOverview",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_compliancerequirementoverview",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,29 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0027_compliance_requirement_overviews"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_check_idx",
|
||||
columns="tenant_id, scan_id, check_id",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_check_idx",
|
||||
),
|
||||
)
|
||||
]
|
||||
@@ -0,0 +1,17 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0028_findings_check_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "scan_id", "check_id"],
                name="find_tenant_scan_check_idx",
            ),
        ),
    ]
107 api/src/backend/api/migrations/0030_lighthouseconfiguration.py Normal file
@@ -0,0 +1,107 @@
|
||||
# Generated by Django 5.1.10 on 2025-06-12 12:45
|
||||
|
||||
import uuid
|
||||
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0029_findings_check_index_parent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="LighthouseConfiguration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
help_text="Name of the configuration",
|
||||
max_length=100,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
),
|
||||
),
|
||||
(
|
||||
"api_key",
|
||||
models.BinaryField(
|
||||
help_text="Encrypted API key for the LLM service"
|
||||
),
|
||||
),
|
||||
(
|
||||
"model",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("gpt-4o-2024-11-20", "GPT-4o v2024-11-20"),
|
||||
("gpt-4o-2024-08-06", "GPT-4o v2024-08-06"),
|
||||
("gpt-4o-2024-05-13", "GPT-4o v2024-05-13"),
|
||||
("gpt-4o", "GPT-4o Default"),
|
||||
("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
|
||||
("gpt-4o-mini", "GPT-4o Mini Default"),
|
||||
],
|
||||
default="gpt-4o-2024-08-06",
|
||||
help_text="Must be one of the supported model names",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"temperature",
|
||||
models.FloatField(default=0, help_text="Must be between 0 and 1"),
|
||||
),
|
||||
(
|
||||
"max_tokens",
|
||||
models.IntegerField(
|
||||
default=4000, help_text="Must be between 500 and 5000"
|
||||
),
|
||||
),
|
||||
(
|
||||
"business_context",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
default="",
|
||||
help_text="Additional business context for this AI model configuration",
|
||||
),
|
||||
),
|
||||
("is_active", models.BooleanField(default=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "lighthouse_configurations",
|
||||
"abstract": False,
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id",),
|
||||
name="unique_lighthouse_config_per_tenant",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthouseconfiguration",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_lighthouseconfiguration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 5.1.10 on 2025-06-23 10:04
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0030_lighthouseconfiguration"),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="scan",
|
||||
name="scheduler_task",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="django_celery_beat.periodictask",
|
||||
),
|
||||
),
|
||||
]
|
||||
150 api/src/backend/api/migrations/0032_saml.py Normal file
@@ -0,0 +1,150 @@
|
||||
# Generated by Django 5.1.10 on 2025-07-02 15:47
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0031_scan_disable_on_cascade_periodic_tasks"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="integration",
|
||||
name="integration_type",
|
||||
field=api.db_utils.IntegrationTypeEnumField(
|
||||
choices=[
|
||||
("amazon_s3", "Amazon S3"),
|
||||
("aws_security_hub", "AWS Security Hub"),
|
||||
("jira", "JIRA"),
|
||||
("slack", "Slack"),
|
||||
]
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SAMLToken",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("expires_at", models.DateTimeField(editable=False)),
|
||||
("token", models.JSONField(unique=True)),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "saml_tokens",
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SAMLConfiguration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"email_domain",
|
||||
models.CharField(
|
||||
help_text="Email domain used to identify the tenant, e.g. prowlerdemo.com",
|
||||
max_length=254,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"metadata_xml",
|
||||
models.TextField(
|
||||
help_text="Raw IdP metadata XML to configure SingleSignOnService, certificates, etc."
|
||||
),
|
||||
),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "saml_configurations",
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samlconfiguration",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_samlconfiguration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samlconfiguration",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant",), name="unique_samlconfig_per_tenant"
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SAMLDomainIndex",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("email_domain", models.CharField(max_length=254, unique=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "saml_domain_index",
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samldomainindex",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("email_domain", "tenant"),
|
||||
name="unique_resources_by_email_domain",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="samldomainindex",
|
||||
constraint=api.rls.BaseSecurityConstraint(
|
||||
name="statements_on_samldomainindex",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
34 api/src/backend/api/migrations/0033_processors_enum.py Normal file
@@ -0,0 +1,34 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import PostgresEnumMigration, ProcessorTypeEnum, register_enum
|
||||
from api.models import Processor
|
||||
|
||||
ProcessorTypeEnumMigration = PostgresEnumMigration(
|
||||
enum_name="processor_type",
|
||||
enum_values=tuple(
|
||||
processor_type[0] for processor_type in Processor.ProcessorChoices.choices
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0032_saml"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
ProcessorTypeEnumMigration.create_enum_type,
|
||||
reverse_code=ProcessorTypeEnumMigration.drop_enum_type,
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(register_enum, enum_class=ProcessorTypeEnum),
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
88 api/src/backend/api/migrations/0034_processors.py Normal file
@@ -0,0 +1,88 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-26 13:04
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0033_processors_enum"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Processor",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"processor_type",
|
||||
api.db_utils.ProcessorTypeEnumField(
|
||||
choices=[("mutelist", "Mutelist")]
|
||||
),
|
||||
),
|
||||
("configuration", models.JSONField(default=dict)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "processors",
|
||||
"abstract": False,
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "id"], name="processor_tenant_id_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "processor_type"],
|
||||
name="processor_tenant_type_idx",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="processor",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "processor_type"),
|
||||
name="unique_processor_types_tenant",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="processor",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_processor",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="scan",
|
||||
name="processor",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
to="api.processor",
|
||||
),
|
||||
),
|
||||
]
|
||||
22 api/src/backend/api/migrations/0035_finding_muted_reason.py Normal file
@@ -0,0 +1,22 @@
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0034_processors"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="muted_reason",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
max_length=500,
|
||||
null=True,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,30 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0035_finding_muted_reason"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="resource_finding_mappings",
|
||||
index_name="rfm_tenant_finding_idx",
|
||||
columns="tenant_id, finding_id",
|
||||
method="BTREE",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="resource_finding_mappings",
|
||||
index_name="rfm_tenant_finding_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0036_rfm_tenant_finding_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="resourcefindingmapping",
            index=models.Index(
                fields=["tenant_id", "finding_id"],
                name="rfm_tenant_finding_idx",
            ),
        ),
    ]
@@ -0,0 +1,15 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0037_rfm_tenant_finding_index_parent"),
    ]

    operations = [
        migrations.AddField(
            model_name="resource",
            name="failed_findings_count",
            field=models.IntegerField(default=0),
        )
    ]
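The new failed_findings_count column is a denormalized per-resource counter. How it is maintained lives elsewhere in this changeset; as a generic sketch, such counters are usually bumped atomically with an F expression rather than read-modify-write (model and field names come from the migration, the helper itself is illustrative):

from django.db.models import F

from api.models import Resource


def bump_failed_findings(resource_id) -> None:
    # Atomic increment performed in SQL; avoids races between concurrent workers.
    Resource.objects.filter(id=resource_id).update(
        failed_findings_count=F("failed_findings_count") + 1
    )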
@@ -0,0 +1,20 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0038_resource_failed_findings_count"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="resource",
            index=models.Index(
                fields=["tenant_id", "-failed_findings_count", "id"],
                name="resources_failed_findings_idx",
            ),
        ),
    ]
@@ -0,0 +1,30 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0039_resource_resources_failed_findings_idx"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_resource_idx",
                columns="tenant_id, resource_id",
                method="BTREE",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_resource_idx",
            ),
        ),
    ]
@@ -0,0 +1,17 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0040_rfm_tenant_resource_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="resourcefindingmapping",
            index=models.Index(
                fields=["tenant_id", "resource_id"],
                name="rfm_tenant_resource_idx",
            ),
        ),
    ]
@@ -0,0 +1,23 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0041_rfm_tenant_resource_parent_partitions"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="scan",
            index=models.Index(
                condition=models.Q(("state", "completed")),
                fields=["tenant_id", "provider_id", "-inserted_at"],
                include=("id",),
                name="scans_prov_ins_desc_idx",
            ),
        ),
    ]
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
from rest_framework_json_api.pagination import JsonApiPageNumberPagination
from drf_spectacular_jsonapi.schemas.pagination import JsonApiPageNumberPagination


class ComplianceOverviewPagination(JsonApiPageNumberPagination):
@@ -2,8 +2,7 @@ from typing import Any
from uuid import uuid4

from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS
from django.db import models
from django.db import DEFAULT_DB_ALIAS, models
from django.db.backends.ddl_references import Statement, Table

from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
@@ -59,11 +58,11 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
    drop_sql_query = """
        ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
        REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
    """

    drop_policy_sql_query = """
        DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
        DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
    """

    def __init__(
@@ -88,9 +87,7 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
                f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
            )

        full_create_sql_query = (
            f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
        )
        full_create_sql_query = f"{self.rls_sql_query}{policy_queries}{grant_queries}"

        table_name = model._meta.db_table
        if self.partition_name:
@@ -107,16 +104,20 @@ class RowLevelSecurityConstraint(models.BaseConstraint):

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        field_column = schema_editor.quote_name(self.target_field)
        raw_table_name = model._meta.db_table
        table_name = raw_table_name
        if self.partition_name:
            raw_table_name = f"{raw_table_name}_{self.partition_name}"
            table_name = raw_table_name

        full_drop_sql_query = (
            f"{self.drop_sql_query}"
            f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
            f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
        )
        table_name = model._meta.db_table
        if self.partition_name:
            table_name = f"{table_name}_{self.partition_name}"
        return Statement(
            full_drop_sql_query,
            table_name=Table(table_name, schema_editor.quote_name),
            raw_table_name=raw_table_name,
            field_column=field_column,
            db_user=DB_USER,
            partition_name=self.partition_name,
@@ -1,12 +1,12 @@
from celery import states
from celery.signals import before_task_publish
from config.celery import celery_app
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django_celery_beat.models import PeriodicTask
from django_celery_results.backends.database import DatabaseBackend

from api.db_utils import delete_related_daily_task
from api.models import Provider
from config.celery import celery_app


def create_task_result_on_publish(sender=None, headers=None, **kwargs):  # noqa: F841
@@ -31,5 +31,4 @@ before_task_publish.connect(
@receiver(post_delete, sender=Provider)
def delete_provider_scan_task(sender, instance, **kwargs):  # noqa: F841
    # Delete the associated periodic task when the provider is deleted
    task_name = f"scan-perform-scheduled-{instance.id}"
    PeriodicTask.objects.filter(name=task_name).delete()
    delete_related_daily_task(instance.id)
File diff suppressed because it is too large
@@ -3,13 +3,15 @@ from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from api.models import Membership, User
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_basic_authentication():
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
test_password = "Test_password@1"
|
||||
|
||||
# Check that a 401 is returned when no basic authentication is provided
|
||||
no_auth_response = client.get(reverse("provider-list"))
|
||||
@@ -106,7 +108,7 @@ def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fi
|
||||
user1_email = "user1@testing.com"
|
||||
user2_email = "user2@testing.com"
|
||||
|
||||
password = "thisisapassword123"
|
||||
password = "Thisisapassword123@"
|
||||
|
||||
user1_response = client.post(
|
||||
reverse("user-list"),
|
||||
@@ -177,3 +179,122 @@ def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fi
|
||||
user2_me = client.get(reverse("user-me"), headers=user2_headers)
|
||||
assert user2_me.status_code == 200
|
||||
assert user2_me.json()["data"]["attributes"]["email"] == user2_email
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestTokenSwitchTenant:
|
||||
def test_switch_tenant_with_valid_token(self, tenants_fixture, providers_fixture):
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "Test_password1@"
|
||||
|
||||
# Check that we can create a new user without any kind of authentication
|
||||
user_creation_response = client.post(
|
||||
reverse("user-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "users",
|
||||
"attributes": {
|
||||
"name": "test",
|
||||
"email": test_user,
|
||||
"password": test_password,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert user_creation_response.status_code == 201
|
||||
|
||||
# Create a new relationship between this user and another tenant
|
||||
tenant_id = tenants_fixture[0].id
|
||||
user_instance = User.objects.get(email=test_user)
|
||||
Membership.objects.create(user=user_instance, tenant_id=tenant_id)
|
||||
|
||||
# Check that using our new user's credentials we can authenticate and get the providers
|
||||
access_token, _ = get_api_tokens(client, test_user, test_password)
|
||||
auth_headers = get_authorization_header(access_token)
|
||||
|
||||
user_me_response = client.get(
|
||||
reverse("user-me"),
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert user_me_response.status_code == 200
|
||||
# Assert this user belongs to two tenants
|
||||
assert (
|
||||
user_me_response.json()["data"]["relationships"]["memberships"]["meta"][
|
||||
"count"
|
||||
]
|
||||
== 2
|
||||
)
|
||||
|
||||
provider_response = client.get(
|
||||
reverse("provider-list"),
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert provider_response.status_code == 200
|
||||
# Empty response since there are no providers in this tenant
|
||||
assert not provider_response.json()["data"]
|
||||
|
||||
switch_tenant_response = client.post(
|
||||
reverse("token-switch"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-switch-tenant",
|
||||
"attributes": {"tenant_id": tenant_id},
|
||||
}
|
||||
},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert switch_tenant_response.status_code == 200
|
||||
new_access_token = switch_tenant_response.json()["data"]["attributes"]["access"]
|
||||
new_auth_headers = get_authorization_header(new_access_token)
|
||||
|
||||
provider_response = client.get(
|
||||
reverse("provider-list"),
|
||||
headers=new_auth_headers,
|
||||
)
|
||||
assert provider_response.status_code == 200
|
||||
# Now it must be data because we switched to another tenant with providers
|
||||
assert provider_response.json()["data"]
|
||||
|
||||
def test_switch_tenant_with_invalid_token(self, create_test_user, tenants_fixture):
|
||||
client = APIClient()
|
||||
|
||||
access_token, refresh_token = get_api_tokens(
|
||||
client, create_test_user.email, TEST_PASSWORD
|
||||
)
|
||||
auth_headers = get_authorization_header(access_token)
|
||||
|
||||
invalid_token_response = client.post(
|
||||
reverse("token-switch"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-switch-tenant",
|
||||
"attributes": {"tenant_id": "invalid_tenant_id"},
|
||||
}
|
||||
},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert invalid_token_response.status_code == 400
|
||||
assert invalid_token_response.json()["errors"][0]["code"] == "invalid"
|
||||
assert (
|
||||
invalid_token_response.json()["errors"][0]["detail"]
|
||||
== "Must be a valid UUID."
|
||||
)
|
||||
|
||||
invalid_tenant_response = client.post(
|
||||
reverse("token-switch"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-switch-tenant",
|
||||
"attributes": {"tenant_id": tenants_fixture[-1].id},
|
||||
}
|
||||
},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert invalid_tenant_response.status_code == 400
|
||||
assert invalid_tenant_response.json()["errors"][0]["code"] == "invalid"
|
||||
assert invalid_tenant_response.json()["errors"][0]["detail"] == (
|
||||
"Tenant does not exist or user is not a " "member."
|
||||
)
|
||||
|
||||
@@ -17,7 +17,7 @@ def test_delete_provider_without_executing_task(
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
test_password = "Test_password1@"
|
||||
|
||||
prowler_task = tasks_fixture[0]
|
||||
task_mock = Mock()
|
||||
|
||||
77 api/src/backend/api/tests/test_adapters.py Normal file
@@ -0,0 +1,77 @@
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from allauth.socialaccount.models import SocialLogin
|
||||
from django.contrib.auth import get_user_model
|
||||
|
||||
from api.adapters import ProwlerSocialAccountAdapter
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestProwlerSocialAccountAdapter:
|
||||
def test_get_user_by_email_returns_user(self, create_test_user):
|
||||
adapter = ProwlerSocialAccountAdapter()
|
||||
user = adapter.get_user_by_email(create_test_user.email)
|
||||
assert user == create_test_user
|
||||
|
||||
def test_get_user_by_email_returns_none_for_unknown_email(self):
|
||||
adapter = ProwlerSocialAccountAdapter()
|
||||
assert adapter.get_user_by_email("notfound@example.com") is None
|
||||
|
||||
def test_pre_social_login_links_existing_user(self, create_test_user, rf):
|
||||
adapter = ProwlerSocialAccountAdapter()
|
||||
|
||||
sociallogin = MagicMock(spec=SocialLogin)
|
||||
sociallogin.account = MagicMock()
|
||||
sociallogin.provider = MagicMock()
|
||||
sociallogin.provider.id = "saml"
|
||||
sociallogin.account.extra_data = {}
|
||||
sociallogin.user = create_test_user
|
||||
sociallogin.connect = MagicMock()
|
||||
|
||||
adapter.pre_social_login(rf.get("/"), sociallogin)
|
||||
|
||||
call_args = sociallogin.connect.call_args
|
||||
assert call_args is not None
|
||||
|
||||
called_request, called_user = call_args[0]
|
||||
assert called_request.path == "/"
|
||||
assert called_user.email == create_test_user.email
|
||||
|
||||
def test_pre_social_login_no_link_if_email_missing(self, rf):
|
||||
adapter = ProwlerSocialAccountAdapter()
|
||||
|
||||
sociallogin = MagicMock(spec=SocialLogin)
|
||||
sociallogin.account = MagicMock()
|
||||
sociallogin.provider = MagicMock()
|
||||
sociallogin.user = MagicMock()
|
||||
sociallogin.provider.id = "saml"
|
||||
sociallogin.account.extra_data = {}
|
||||
sociallogin.connect = MagicMock()
|
||||
|
||||
adapter.pre_social_login(rf.get("/"), sociallogin)
|
||||
|
||||
sociallogin.connect.assert_not_called()
|
||||
|
||||
def test_save_user_saml_sets_session_flag(self, rf):
|
||||
adapter = ProwlerSocialAccountAdapter()
|
||||
request = rf.get("/")
|
||||
request.session = {}
|
||||
|
||||
sociallogin = MagicMock(spec=SocialLogin)
|
||||
sociallogin.provider = MagicMock()
|
||||
sociallogin.provider.id = "saml"
|
||||
sociallogin.account = MagicMock()
|
||||
sociallogin.account.extra_data = {}
|
||||
|
||||
mock_user = MagicMock()
|
||||
mock_user.id = 123
|
||||
|
||||
with patch("api.adapters.super") as mock_super:
|
||||
with patch("api.adapters.transaction"):
|
||||
with patch("api.adapters.MainRouter"):
|
||||
mock_super.return_value.save_user.return_value = mock_user
|
||||
adapter.save_user(request, sociallogin)
|
||||
assert request.session["saml_user_created"] == "123"
|
||||
@@ -1,12 +1,12 @@
from unittest.mock import patch, MagicMock
from unittest.mock import MagicMock, patch

from api.compliance import (
    generate_compliance_overview_template,
    generate_scan_compliance,
    get_prowler_provider_checks,
    get_prowler_provider_compliance,
    load_prowler_compliance,
    load_prowler_checks,
    generate_scan_compliance,
    generate_compliance_overview_template,
    load_prowler_compliance,
)
from api.models import Provider
@@ -69,7 +69,7 @@ class TestCompliance:

        load_prowler_compliance()

        from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS
        from api.compliance import PROWLER_CHECKS, PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE

        assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
            "template_key": "template_value"
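The re-import after the call is the giveaway that load_prowler_compliance caches its results in module-level globals. A minimal sketch of that caching, inferred from the test above and not taken from api/compliance.py; the helper call order and arguments are hypothetical.

# Illustrative sketch only: load_prowler_compliance() repopulates module globals,
# which is why the test imports PROWLER_CHECKS and PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
# again after calling it.
PROWLER_CHECKS = {}
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}


def load_prowler_compliance():
    global PROWLER_CHECKS, PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
    compliance = get_prowler_provider_compliance()  # hypothetical helper usage
    PROWLER_CHECKS = load_prowler_checks(compliance)  # hypothetical argument
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = generate_compliance_overview_template(compliance)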
@@ -218,6 +218,10 @@ class TestCompliance:
            Description="Description of requirement 1",
            Attributes=[],
            Checks=["check1", "check2"],
            Tactics=["tactic1"],
            SubTechniques=["subtechnique1"],
            Platforms=["platform1"],
            TechniqueURL="https://example.com",
        )
        requirement2 = MagicMock(
            Id="requirement2",
@@ -225,6 +229,10 @@ class TestCompliance:
            Description="Description of requirement 2",
            Attributes=[],
            Checks=[],
            Tactics=[],
            SubTechniques=[],
            Platforms=[],
            TechniqueURL="",
        )
        compliance1 = MagicMock(
            Requirements=[requirement1, requirement2],
@@ -247,6 +255,10 @@ class TestCompliance:
                "requirement1": {
                    "name": "Requirement 1",
                    "description": "Description of requirement 1",
                    "tactics": ["tactic1"],
                    "subtechniques": ["subtechnique1"],
                    "platforms": ["platform1"],
                    "technique_url": "https://example.com",
                    "attributes": [],
                    "checks": {"check1": None, "check2": None},
                    "checks_status": {
@@ -260,6 +272,10 @@ class TestCompliance:
                "requirement2": {
                    "name": "Requirement 2",
                    "description": "Description of requirement 2",
                    "tactics": [],
                    "subtechniques": [],
                    "platforms": [],
                    "technique_url": "",
                    "attributes": [],
                    "checks": {},
                    "checks_status": {
@@ -268,7 +284,7 @@ class TestCompliance:
                        "manual": 0,
                        "total": 0,
                    },
                    "status": "PASS",
                    "status": "MANUAL",
                },
            },
            "requirements_status": {
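The updated expected fixture above shows the shape of each per-requirement entry that generate_compliance_overview_template now produces, including the new tactics, subtechniques, platforms, and technique_url fields and the MANUAL status for requirements without checks. A minimal sketch of how one such entry could be built, assuming the attribute names shown on the mocked requirements; the function name and loop structure are illustrative, not the actual api/compliance.py code.

# Illustrative only: builds one overview entry per requirement, matching the
# expected dictionary asserted in the test above.
def build_requirement_entry(requirement):
    return {
        "name": requirement.Name,
        "description": requirement.Description,
        "tactics": requirement.Tactics,
        "subtechniques": requirement.SubTechniques,
        "platforms": requirement.Platforms,
        "technique_url": requirement.TechniqueURL,
        "attributes": requirement.Attributes,
        # Each check starts with no result; statuses are filled in per scan.
        "checks": {check: None for check in requirement.Checks},
        "checks_status": {
            "pass": 0,
            "fail": 0,
            "manual": 0,
            "total": len(requirement.Checks),  # assumed; only the zero case is shown
        },
        # Requirements without any automated checks are reported as MANUAL;
        # the default for check-backed requirements is assumed here.
        "status": "MANUAL" if not requirement.Checks else "PASS",
    }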
@@ -3,12 +3,17 @@ from enum import Enum
from unittest.mock import patch

import pytest
from django.conf import settings
from freezegun import freeze_time

from api.db_utils import (
    _should_create_index_on_partition,
    batch_delete,
    create_objects_in_batches,
    enum_to_choices,
    generate_random_token,
    one_week_from_now,
    update_objects_in_batches,
)
from api.models import Provider
@@ -131,9 +136,180 @@ class TestBatchDelete:
        return provider_count

    @pytest.mark.django_db
    def test_batch_delete(self, create_test_providers):
    def test_batch_delete(self, tenants_fixture, create_test_providers):
        tenant_id = str(tenants_fixture[0].id)
        _, summary = batch_delete(
            Provider.objects.all(), batch_size=create_test_providers // 2
            tenant_id, Provider.objects.all(), batch_size=create_test_providers // 2
        )
        assert Provider.objects.all().count() == 0
        assert summary == {"api.Provider": create_test_providers}
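The updated call above now passes the tenant id as the first argument. A minimal sketch of what such a tenant-aware batched delete could look like, based only on how the test calls it and on Django's standard delete() return value; the row-level-security or tenant-context handling is assumed, not taken from api/db_utils.py.

# Illustrative sketch only, assuming the signature used by the test above.
from collections import Counter

from django.conf import settings


def batch_delete(tenant_id, queryset, batch_size=None):
    # Delete the queryset in primary-key chunks to keep each transaction small.
    # tenant_id is assumed to scope the deletes to one tenant, for example by
    # setting a row-level-security context before deleting.
    batch_size = batch_size or settings.DJANGO_DELETION_BATCH_SIZE
    total = 0
    summary = Counter()
    while True:
        pks = list(queryset.values_list("pk", flat=True)[:batch_size])
        if not pks:
            break
        deleted, per_model = queryset.model.objects.filter(pk__in=pks).delete()
        total += deleted
        summary.update(per_model)
    # Matches the test's expectation of a (count, {"app.Model": n}) style summary.
    return total, dict(summary)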
class TestShouldCreateIndexOnPartition:
    @freeze_time("2025-05-15 00:00:00Z")
    @pytest.mark.parametrize(
        "partition_name, all_partitions, expected",
        [
            ("any_name", True, True),
            ("findings_default", True, True),
            ("findings_2022_jan", True, True),
            ("foo_bar", False, True),
            ("findings_2025_MAY", False, True),
            ("findings_2025_may", False, True),
            ("findings_2025_jun", False, True),
            ("findings_2025_apr", False, False),
            ("findings_2025_xyz", False, True),
        ],
    )
    def test_partition_inclusion_logic(self, partition_name, all_partitions, expected):
        assert (
            _should_create_index_on_partition(partition_name, all_partitions)
            is expected
        )

    @freeze_time("2025-05-15 00:00:00Z")
    def test_invalid_date_components(self):
        # Even if the regex matches but the date cannot be built, we fall back to True
        # (e.g. year too big, month number parse error).
        bad_name = "findings_99999_jan"
        assert _should_create_index_on_partition(bad_name, False) is True

        bad_name2 = "findings_2025_abc"
        # abc not in month_map → fall back to True
        assert _should_create_index_on_partition(bad_name2, False) is True
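Taken together, the parametrized cases pin down the decision these tests expect: always index when all_partitions is set, index anything that is not a recognizable findings_<year>_<month> partition, fall back to indexing when the name cannot be parsed, and skip only partitions for months strictly before the (frozen) current month. A minimal reconstruction consistent with those cases, not the actual api/db_utils.py implementation:

# Illustrative only: reproduces the behavior exercised by the tests above.
import re
from datetime import datetime, timezone

_PARTITION_RE = re.compile(r"^findings_(\d+)_([a-z]+)$", re.IGNORECASE)
_MONTH_MAP = {
    "jan": 1, "feb": 2, "mar": 3, "apr": 4, "may": 5, "jun": 6,
    "jul": 7, "aug": 8, "sep": 9, "oct": 10, "nov": 11, "dec": 12,
}


def _should_create_index_on_partition(partition_name, all_partitions):
    # When indexing all partitions, never skip anything.
    if all_partitions:
        return True
    match = _PARTITION_RE.match(partition_name)
    if not match:
        # Names that do not follow findings_<year>_<month> are indexed.
        return True
    year_str, month_str = match.groups()
    month = _MONTH_MAP.get(month_str.lower())
    if month is None:
        return True
    try:
        partition_start = datetime(int(year_str), month, 1, tzinfo=timezone.utc)
    except ValueError:
        # Unparseable dates (e.g. findings_99999_jan) fall back to indexing.
        return True
    now = datetime.now(timezone.utc)
    current_month_start = datetime(now.year, now.month, 1, tzinfo=timezone.utc)
    # Only skip partitions strictly older than the current month.
    return partition_start >= current_month_start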
@pytest.mark.django_db
class TestCreateObjectsInBatches:
    @pytest.fixture
    def tenant(self, tenants_fixture):
        return tenants_fixture[0]

    def make_provider_instances(self, tenant, count):
        """
        Return a list of `count` unsaved Provider instances for the given tenant.
        """
        base_uid = 1000
        return [
            Provider(
                tenant=tenant,
                uid=str(base_uid + i),
                provider=Provider.ProviderChoices.AWS,
            )
            for i in range(count)
        ]

    def test_exact_multiple_of_batch(self, tenant):
        total = 6
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total

    def test_non_multiple_of_batch(self, tenant):
        total = 7
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total

    def test_batch_size_default(self, tenant):
        default_size = settings.DJANGO_DELETION_BATCH_SIZE
        total = default_size + 2
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total
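These tests only constrain the call shape (tenant id, model, unsaved instances, optional batch size) and the end state (all rows created). A minimal sketch consistent with that, assuming a plain bulk_create in slices; the tenant-context handling implied by the tenant_id argument is assumed, not taken from api/db_utils.py.

# Illustrative sketch only, assuming the signature used by the tests above.
from django.conf import settings


def create_objects_in_batches(tenant_id, model, objects, batch_size=None):
    batch_size = batch_size or settings.DJANGO_DELETION_BATCH_SIZE
    # Insert the unsaved instances in fixed-size slices so a large import does
    # not produce one huge INSERT or transaction.
    for start in range(0, len(objects), batch_size):
        model.objects.bulk_create(objects[start:start + batch_size])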
@pytest.mark.django_db
class TestUpdateObjectsInBatches:
    @pytest.fixture
    def tenant(self, tenants_fixture):
        return tenants_fixture[0]

    def make_provider_instances(self, tenant, count):
        """
        Return a list of `count` unsaved Provider instances for the given tenant.
        """
        base_uid = 2000
        return [
            Provider(
                tenant=tenant,
                uid=str(base_uid + i),
                provider=Provider.ProviderChoices.AWS,
            )
            for i in range(count)
        ]

    def test_exact_multiple_of_batch(self, tenant):
        total = 6
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)
        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        # Fetch them back, mutate the `uid` field, then update in batches
        providers = list(Provider.objects.filter(tenant=tenant))
        for p in providers:
            p.uid = f"{p.uid}_upd"

        update_objects_in_batches(
            tenant_id=str(tenant.id),
            model=Provider,
            objects=providers,
            fields=["uid"],
            batch_size=batch_size,
        )

        qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
        assert qs.count() == total

    def test_non_multiple_of_batch(self, tenant):
        total = 7
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)
        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        providers = list(Provider.objects.filter(tenant=tenant))
        for p in providers:
            p.uid = f"{p.uid}_upd"

        update_objects_in_batches(
            tenant_id=str(tenant.id),
            model=Provider,
            objects=providers,
            fields=["uid"],
            batch_size=batch_size,
        )

        qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
        assert qs.count() == total

    def test_batch_size_default(self, tenant):
        default_size = settings.DJANGO_DELETION_BATCH_SIZE
        total = default_size + 2
        objs = self.make_provider_instances(tenant, total)
        create_objects_in_batches(str(tenant.id), Provider, objs)

        providers = list(Provider.objects.filter(tenant=tenant))
        for p in providers:
            p.uid = f"{p.uid}_upd"

        # Update without specifying batch_size (uses default)
        update_objects_in_batches(
            tenant_id=str(tenant.id),
            model=Provider,
            objects=providers,
            fields=["uid"],
        )

        qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
        assert qs.count() == total
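The update counterpart is called with already-mutated instances plus the list of fields to write, and the assertions only require that every row ends up updated. A minimal sketch consistent with that, assuming a sliced bulk_update; as above, the tenant-context handling is assumed rather than taken from api/db_utils.py.

# Illustrative sketch only, assuming the keyword signature used by the tests above.
from django.conf import settings


def update_objects_in_batches(tenant_id, model, objects, fields, batch_size=None):
    batch_size = batch_size or settings.DJANGO_DELETION_BATCH_SIZE
    # Write the already-mutated instances back in fixed-size slices, touching
    # only the listed fields.
    for start in range(0, len(objects), batch_size):
        model.objects.bulk_update(objects[start:start + batch_size], fields)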
Some files were not shown because too many files have changed in this diff.