Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-03-31 05:06:57 +00:00)

Compare commits: 710 commits
The comparison spans 710 commits, listed from ec7a95ebba through fef332a591; the compare view's per-commit author and date columns were not captured in this mirror.
21 changed lines: .env

```diff
@@ -6,17 +6,11 @@
 PROWLER_UI_VERSION="stable"
 AUTH_URL=http://localhost:3000
 API_BASE_URL=http://prowler-api:8080/api/v1
 NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
 NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
 AUTH_TRUST_HOST=true
 UI_PORT=3000
 # Temp URL for feeds need to use actual
 RSS_FEED_URL=https://prowler.com/blog/rss
 # openssl rand -base64 32
 AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
 # Google Tag Manager ID
 NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""


 #### Prowler API Configuration ####
 PROWLER_API_VERSION="stable"
@@ -30,10 +24,6 @@ POSTGRES_USER=prowler
 POSTGRES_PASSWORD=postgres
 POSTGRES_DB=prowler_db

 # Celery-Prowler task settings
 TASK_RETRY_DELAY_SECONDS=0.1
 TASK_RETRY_ATTEMPTS=5

 # Valkey settings
 # If running Valkey and celery on host, use localhost, else use 'valkey'
 VALKEY_HOST=valkey
@@ -133,7 +123,7 @@ SENTRY_ENVIRONMENT=local
 SENTRY_RELEASE=local

 #### Prowler release version ####
-NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.10.0
+NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.5.1

 # Social login credentials
 SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
@@ -143,12 +133,3 @@ SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""
 SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
 SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
 SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""

 # Single Sign-On (SSO)
 SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"

 # Lighthouse tracing
 LANGSMITH_TRACING=false
 LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
 LANGSMITH_API_KEY=""
 LANGCHAIN_PROJECT=""
```
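The `# openssl rand -base64 32` comment above is the file's own recipe for producing an `AUTH_SECRET` value; as a minimal sketch:

```bash
# Generate a fresh 32-byte, base64-encoded value for AUTH_SECRET
openssl rand -base64 32
```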
10 changed lines: .github/labeler.yml (vendored)

```diff
@@ -22,21 +22,11 @@ provider/kubernetes:
   - any-glob-to-any-file: "prowler/providers/kubernetes/**"
   - any-glob-to-any-file: "tests/providers/kubernetes/**"

 provider/m365:
 - changed-files:
   - any-glob-to-any-file: "prowler/providers/m365/**"
   - any-glob-to-any-file: "tests/providers/m365/**"

-provider/github:
-- changed-files:
-  - any-glob-to-any-file: "prowler/providers/github/**"
-  - any-glob-to-any-file: "tests/providers/github/**"
-
-provider/iac:
-- changed-files:
-  - any-glob-to-any-file: "prowler/providers/iac/**"
-  - any-glob-to-any-file: "tests/providers/iac/**"
-
 github_actions:
 - changed-files:
   - any-glob-to-any-file: ".github/workflows/*"
```
1 changed line: .github/pull_request_template.md (vendored)

```diff
@@ -16,7 +16,6 @@ Please include a summary of the change and which issue is fixed. List any depend
 - [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
 - [ ] Review if backport is needed.
 - [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)
-- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/prowler/CHANGELOG.md), if applicable.

 #### API
 - [ ] Verify if API specs need to be regenerated.
```
.github/workflows/api-build-lint-push-containers.yml (per-file header lost in the capture; the filename appears in the workflow's own paths filter)

```diff
@@ -6,7 +6,6 @@ on:
       - "master"
     paths:
       - "api/**"
       - "prowler/**"
       - ".github/workflows/api-build-lint-push-containers.yml"

 # Uncomment the code below to test this action on PRs
@@ -77,12 +76,12 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

       - name: Build and push container image (latest)
         # Comment the following line for testing
         if: github.event_name == 'push'
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           context: ${{ env.WORKING_DIRECTORY }}
           # Set push: false for testing
@@ -95,7 +94,7 @@ jobs:

       - name: Build and push container image (release)
         if: github.event_name == 'release'
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           context: ${{ env.WORKING_DIRECTORY }}
           push: true
```
4 changed lines: .github/workflows/api-codeql.yml (vendored)

```diff
@@ -48,12 +48,12 @@ jobs:

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
+      uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
       with:
         languages: ${{ matrix.language }}
         config-file: ./.github/codeql/api-codeql-config.yml

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
+      uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
       with:
         category: "/language:${{matrix.language}}"
```
36 changed lines: .github/workflows/api-pull-request.yml (vendored)

```diff
@@ -28,10 +24,6 @@ env:
   VALKEY_DB: 0
   API_WORKING_DIR: ./api
   IMAGE_NAME: prowler-api
-  IGNORE_FILES: |
-    api/docs/**
-    api/README.md
-    api/CHANGELOG.md

 jobs:
   test:
@@ -82,7 +78,12 @@ jobs:
         uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
         with:
           files: api/**
-          files_ignore: ${{ env.IGNORE_FILES }}
+          files_ignore: |
+            api/.github/**
+            api/docs/**
+            api/permissions/**
+            api/README.md
+            api/mkdocs.yml

       - name: Replace @master with current branch in pyproject.toml
         working-directory: ./api
@@ -112,12 +113,6 @@
           python-version: ${{ matrix.python-version }}
           cache: "poetry"

-      - name: Install system dependencies for xmlsec
-        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config

       - name: Install dependencies
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -163,10 +158,8 @@
       - name: Safety
         working-directory: ./api
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        # 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
-        # TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
         run: |
-          poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745
+          poetry run safety check --ignore 70612,66963,74429

       - name: Vulture
         working-directory: ./api
@@ -188,7 +181,7 @@

       - name: Upload coverage reports to Codecov
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
+        uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
@@ -197,19 +190,10 @@
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

-      - name: Test if changes are in not ignored paths
-        id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: api/**
-          files_ignore: ${{ env.IGNORE_FILES }}
       - name: Set up Docker Buildx
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
       - name: Build Container
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           context: ${{ env.API_WORKING_DIR }}
           push: false
```
67 changed lines: .github/workflows/create-backport-label.yml (vendored), file removed

```diff
@@ -1,67 +0,0 @@
-name: Create Backport Label
-
-on:
-  release:
-    types: [published]
-
-jobs:
-  create_label:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-      issues: write
-    steps:
-      - name: Create backport label
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          RELEASE_TAG: ${{ github.event.release.tag_name }}
-          OWNER_REPO: ${{ github.repository }}
-        run: |
-          VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
-
-          # Check if it's a minor version (X.Y.0)
-          if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
-            echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."
-
-            TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
-
-            FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
-            FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"
-
-            echo "Effective label name will be: ${FINAL_LABEL_NAME}"
-            echo "Effective description will be: ${FINAL_DESCRIPTION}"
-
-            # Check if the label already exists
-            STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")
-
-            if [ "${STATUS_CODE}" -eq 200 ]; then
-              echo "Label '${FINAL_LABEL_NAME}' already exists."
-            elif [ "${STATUS_CODE}" -eq 404 ]; then
-              echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
-              # Prepare JSON data payload
-              JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")
-
-              CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
-                -H "Accept: application/vnd.github.v3+json" \
-                -H "Authorization: token ${GITHUB_TOKEN}" \
-                --data "${JSON_DATA}" \
-                "https://api.github.com/repos/${OWNER_REPO}/labels")
-
-              CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
-              rm -f /tmp/curl_create_response.json
-
-              if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
-                echo "Label '${FINAL_LABEL_NAME}' created successfully."
-              else
-                echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
-                echo "Response: $CREATE_RESPONSE_BODY"
-                exit 1
-              fi
-            else
-              echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
-              exit 1
-            fi
-          else
-            echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
-            exit 0
-          fi
```
2 changed lines: .github/workflows/find-secrets.yml (vendored)

```diff
@@ -11,7 +11,7 @@ jobs:
         with:
           fetch-depth: 0
       - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@a05cf0859455b5b16317ee22d809887a4043cdf0 # v3.90.2
+        uses: trufflesecurity/trufflehog@b06f6d72a3791308bb7ba59c2b8cb7a083bd17e4 # v3.88.26
        with:
          path: ./
          base: ${{ github.event.repository.default_branch }}
```
266 changed lines: .github/workflows/prowler-release-preparation.yml (vendored), file removed

```diff
@@ -1,266 +0,0 @@
-name: Prowler Release Preparation
-
-run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}
-
-on:
-  workflow_dispatch:
-    inputs:
-      prowler_version:
-        description: 'Prowler version to release (e.g., 5.9.0)'
-        required: true
-        type: string
-
-env:
-  PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}
-
-jobs:
-  prepare-release:
-    if: github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-      pull-requests: write
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          fetch-depth: 0
-          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-
-      - name: Set up Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
-        with:
-          python-version: '3.12'
-
-      - name: Install Poetry
-        run: |
-          python3 -m pip install --user poetry
-          echo "$HOME/.local/bin" >> $GITHUB_PATH
-
-      - name: Configure Git
-        run: |
-          git config --global user.name "prowler-bot"
-          git config --global user.email "179230569+prowler-bot@users.noreply.github.com"
-
-      - name: Parse version and determine branch
-        run: |
-          # Validate version format (reusing pattern from sdk-bump-version.yml)
-          if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
-            MAJOR_VERSION=${BASH_REMATCH[1]}
-            MINOR_VERSION=${BASH_REMATCH[2]}
-            PATCH_VERSION=${BASH_REMATCH[3]}
-
-            # Export version components to environment
-            echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
-            echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
-            echo "PATCH_VERSION=${PATCH_VERSION}" >> "${GITHUB_ENV}"
-
-            # Determine branch name (format: v5.9)
-            BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
-            echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"
-
-            # Calculate UI version (1.X.X format - matches Prowler minor version)
-            UI_VERSION="1.${MINOR_VERSION}.${PATCH_VERSION}"
-            echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
-
-            # Calculate API version (1.X.X format - one minor version ahead)
-            API_MINOR_VERSION=$((MINOR_VERSION + 1))
-            API_VERSION="1.${API_MINOR_VERSION}.${PATCH_VERSION}"
-            echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
-
-            echo "Prowler version: $PROWLER_VERSION"
-            echo "Branch name: $BRANCH_NAME"
-            echo "UI version: $UI_VERSION"
-            echo "API version: $API_VERSION"
-            echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
-          else
-            echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
-            exit 1
-          fi
-
-      - name: Checkout existing branch for patch release
-        if: ${{ env.PATCH_VERSION != '0' }}
-        run: |
-          echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
-          if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME"; then
-            echo "Branch $BRANCH_NAME exists locally, checking out..."
-            git checkout "$BRANCH_NAME"
-          elif git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
-            echo "Branch $BRANCH_NAME exists remotely, checking out..."
-            git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
-          else
-            echo "ERROR: Branch $BRANCH_NAME should exist for patch release $PROWLER_VERSION"
-            exit 1
-          fi
-
-      - name: Verify version in pyproject.toml
-        run: |
-          CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
-          PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
-          if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
-            echo "ERROR: Version mismatch in pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
-            exit 1
-          fi
-          echo "✓ pyproject.toml version: $CURRENT_VERSION"
-
-      - name: Verify version in prowler/config/config.py
-        run: |
-          CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
-          PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
-          if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
-            echo "ERROR: Version mismatch in prowler/config/config.py (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
-            exit 1
-          fi
-          echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
-
-      - name: Verify version in api/pyproject.toml
-        run: |
-          CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
-          API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
-          if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
-            echo "ERROR: API version mismatch in api/pyproject.toml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
-            exit 1
-          fi
-          echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
-
-      - name: Verify prowler dependency in api/pyproject.toml
-        if: ${{ env.PATCH_VERSION != '0' }}
-        run: |
-          CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
-          BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
-          if [ "$CURRENT_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
-            echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$BRANCH_NAME_TRIMMED', found: '$CURRENT_PROWLER_REF')"
-            exit 1
-          fi
-          echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
-
-      - name: Verify version in api/src/backend/api/v1/views.py
-        run: |
-          CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
-          API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
-          if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
-            echo "ERROR: API version mismatch in views.py (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
-            exit 1
-          fi
-          echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"
-
-      - name: Create release branch for minor release
-        if: ${{ env.PATCH_VERSION == '0' }}
-        run: |
-          echo "Minor release detected (patch = 0), creating new branch $BRANCH_NAME..."
-          if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME" || git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
-            echo "ERROR: Branch $BRANCH_NAME already exists for minor release $PROWLER_VERSION"
-            exit 1
-          fi
-          git checkout -b "$BRANCH_NAME"
-
-          # Push the new branch first so it exists remotely
-          git push origin "$BRANCH_NAME"
-
-      - name: Update prowler dependency in api/pyproject.toml
-        if: ${{ env.PATCH_VERSION == '0' }}
-        run: |
-          CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
-          BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
-
-          # Minor release: update the dependency to use the new branch
-          echo "Minor release detected - updating prowler dependency from '$CURRENT_PROWLER_REF' to '$BRANCH_NAME_TRIMMED'"
-          sed -i "s|prowler @ git+https://github.com/prowler-cloud/prowler.git@[^\"]*\"|prowler @ git+https://github.com/prowler-cloud/prowler.git@$BRANCH_NAME_TRIMMED\"|" api/pyproject.toml
-
-          # Verify the change was made
-          UPDATED_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
-          if [ "$UPDATED_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
-            echo "ERROR: Failed to update prowler dependency in api/pyproject.toml"
-            exit 1
-          fi
-
-          # Update poetry lock file
-          echo "Updating poetry.lock file..."
-          cd api
-          poetry lock
-          cd ..
-
-          # Commit and push the changes
-          git add api/pyproject.toml api/poetry.lock
-          git commit -m "chore(api): update prowler dependency to $BRANCH_NAME_TRIMMED for release $PROWLER_VERSION"
-          git push origin "$BRANCH_NAME"
-
-          echo "✓ api/pyproject.toml prowler dependency updated to: $UPDATED_PROWLER_REF"
-
-      - name: Extract changelog entries
-        run: |
-          set -e
-
-          # Function to extract changelog for a specific version
-          extract_changelog() {
-            local file="$1"
-            local version="$2"
-            local output_file="$3"
-
-            if [ ! -f "$file" ]; then
-              echo "Warning: $file not found, skipping..."
-              touch "$output_file"
-              return
-            fi
-
-            # Extract changelog section for this version
-            awk -v version="$version" '
-              /^## \[v?'"$version"'\]/ { found=1; next }
-              found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
-              found && !/^## \[v?'"$version"'\]/ { print }
-            ' "$file" > "$output_file"
-
-            # Remove --- separators
-            sed -i '/^---$/d' "$output_file"
-
-            # Remove trailing empty lines
-            sed -i '/^$/d' "$output_file"
-          }
-
-          # Extract changelogs
-          echo "Extracting changelog entries..."
-          extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
-          extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
-          extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
-
-          # Combine changelogs in order: UI, API, SDK
-          > combined_changelog.md
-
-          if [ -s "ui_changelog.md" ]; then
-            echo "## UI" >> combined_changelog.md
-            echo "" >> combined_changelog.md
-            cat ui_changelog.md >> combined_changelog.md
-            echo "" >> combined_changelog.md
-          fi
-
-          if [ -s "api_changelog.md" ]; then
-            echo "## API" >> combined_changelog.md
-            echo "" >> combined_changelog.md
-            cat api_changelog.md >> combined_changelog.md
-            echo "" >> combined_changelog.md
-          fi
-
-          if [ -s "prowler_changelog.md" ]; then
-            echo "## SDK" >> combined_changelog.md
-            echo "" >> combined_changelog.md
-            cat prowler_changelog.md >> combined_changelog.md
-            echo "" >> combined_changelog.md
-          fi
-
-          echo "Combined changelog preview:"
-          cat combined_changelog.md
-
-      - name: Create draft release
-        uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
-        with:
-          tag_name: ${{ env.PROWLER_VERSION }}
-          name: Prowler ${{ env.PROWLER_VERSION }}
-          body_path: combined_changelog.md
-          draft: true
-          target_commitish: ${{ env.PATCH_VERSION == '0' && 'master' || env.BRANCH_NAME }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Clean up temporary files
-        run: |
-          rm -f prowler_changelog.md api_changelog.md ui_changelog.md combined_changelog.md
```
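The "Parse version and determine branch" step in the removed workflow encodes the component-version scheme (the UI tracks the SDK minor version, the API runs one minor ahead); a minimal sketch with an assumed input of 5.9.0, the example from the workflow's own input description:

```bash
PROWLER_VERSION="5.9.0"   # hypothetical workflow input
[[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]
MAJOR=${BASH_REMATCH[1]}; MINOR=${BASH_REMATCH[2]}; PATCH=${BASH_REMATCH[3]}
echo "branch: v${MAJOR}.${MINOR}"          # v5.9
echo "UI:     1.${MINOR}.${PATCH}"         # 1.9.0  (matches the SDK minor)
echo "API:    1.$((MINOR + 1)).${PATCH}"   # 1.10.0 (one minor version ahead)
```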
A removed workflow named "Check Changelog" (per-file header lost in the capture)

```diff
@@ -1,77 +0,0 @@
-name: Check Changelog
-
-on:
-  pull_request:
-    types: [opened, synchronize, reopened, labeled, unlabeled]
-
-jobs:
-  check-changelog:
-    if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
-    runs-on: ubuntu-latest
-    permissions:
-      id-token: write
-      contents: read
-      pull-requests: write
-    env:
-      MONITORED_FOLDERS: "api ui prowler"
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          fetch-depth: 0
-
-      - name: Get list of changed files
-        id: changed_files
-        run: |
-          git fetch origin ${{ github.base_ref }}
-          git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
-          cat changed_files.txt
-
-      - name: Check for folder changes and changelog presence
-        id: check_folders
-        run: |
-          missing_changelogs=""
-
-          for folder in $MONITORED_FOLDERS; do
-            if grep -q "^${folder}/" changed_files.txt; then
-              echo "Detected changes in ${folder}/"
-              if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
-                echo "No changelog update found for ${folder}/"
-                missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
-              fi
-            fi
-          done
-
-          echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
-          echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
-          echo "EOF" >> $GITHUB_OUTPUT
-
-      - name: Find existing changelog comment
-        if: github.event.pull_request.head.repo.full_name == github.repository
-        id: find_comment
-        uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
-        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          comment-author: 'github-actions[bot]'
-          body-includes: '<!-- changelog-check -->'
-
-      - name: Update PR comment with changelog status
-        if: github.event.pull_request.head.repo.full_name == github.repository
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
-        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          comment-id: ${{ steps.find_comment.outputs.comment-id }}
-          edit-mode: replace
-          body: |
-            <!-- changelog-check -->
-            ${{ steps.check_folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
-
-            {0}
-
-            Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check_folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉' }}
-
-      - name: Fail if changelog is missing
-        if: steps.check_folders.outputs.missing_changelogs != ''
-        run: |
-          echo "ERROR: Missing changelog updates in some folders."
-          exit 1
```
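The changelog gate in that removed workflow is two `grep` calls per monitored folder; a minimal local sketch against a fabricated `changed_files.txt`:

```bash
# Fabricated changed-file list: api/ is touched without its changelog,
# ui/ updates its changelog, prowler/ is untouched.
printf 'api/src/backend/urls.py\nui/CHANGELOG.md\nui/app/page.tsx\n' > changed_files.txt
for folder in api ui prowler; do
  if grep -q "^${folder}/" changed_files.txt && \
     ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
    echo "No changelog update found for ${folder}/"   # fires only for 'api'
  fi
done
```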
19 changed lines: .github/workflows/pull-request-merged.yml (vendored)

```diff
@@ -18,7 +18,7 @@ jobs:
       - name: Set short git commit SHA
         id: vars
         run: |
-          shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
+          shortSha=$(git rev-parse --short ${{ github.sha }})
           echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

       - name: Trigger pull request
@@ -27,12 +27,11 @@
           token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
           repository: ${{ secrets.CLOUD_DISPATCH }}
           event-type: prowler-pull-request-merged
-          client-payload: |
-            {
-              "PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
-              "PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
-              "PROWLER_PR_TITLE": ${{ toJson(github.event.pull_request.title) }},
-              "PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
-              "PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
-              "PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }}
-            }
+          client-payload: '{
+            "PROWLER_COMMIT_SHA": "${{ github.sha }}",
+            "PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
+            "PROWLER_PR_TITLE": "${{ github.event.pull_request.title }}",
+            "PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
+            "PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
+            "PROWLER_PR_URL":${{ toJson(github.event.pull_request.html_url) }}
+            }'
```
(per-file header lost in the capture; a container build-and-push workflow)

```diff
@@ -123,11 +123,11 @@ jobs:
           AWS_REGION: ${{ env.AWS_REGION }}

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

       - name: Build and push container image (latest)
         if: github.event_name == 'push'
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           push: true
           tags: |
@@ -140,7 +140,7 @@

       - name: Build and push container image (release)
         if: github.event_name == 'release'
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           # Use local context to get changes
           # https://github.com/docker/build-push-action#path-context
```
3 changed lines: .github/workflows/sdk-bump-version.yml (vendored)

```diff
@@ -12,6 +12,7 @@ env:
 jobs:
   bump-version:
     name: Bump Version
+    if: github.repository == 'prowler-cloud/prowler'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -96,7 +97,6 @@
           commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
           branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
           title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
-          labels: no-changelog
           body: |
             ### Description
@@ -135,7 +135,6 @@
           commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
           branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
           title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
-          labels: no-changelog
           body: |
             ### Description
```
4 changed lines: .github/workflows/sdk-codeql.yml (vendored)

```diff
@@ -56,12 +56,12 @@ jobs:

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
+      uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
       with:
         languages: ${{ matrix.language }}
         config-file: ./.github/codeql/sdk-codeql-config.yml

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
+      uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
       with:
         category: "/language:${{matrix.language}}"
```
26 changed lines: .github/workflows/sdk-pull-request.yml (vendored)

```diff
@@ -102,15 +102,8 @@ jobs:
         run: |
           poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .

-      - name: Dockerfile - Check if Dockerfile has changed
-        id: dockerfile-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            Dockerfile
-
       - name: Hadolint
-        if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
+        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           /tmp/hadolint Dockerfile --ignore=DL3013

@@ -219,21 +212,6 @@
         run: |
           poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365

-      # Test IaC
-      - name: IaC - Check if any file has changed
-        id: iac-changed-files
-        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
-        with:
-          files: |
-            ./prowler/providers/iac/**
-            ./tests/providers/iac/**
-            .poetry.lock
-
-      - name: IaC - Test
-        if: steps.iac-changed-files.outputs.any_changed == 'true'
-        run: |
-          poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
-
       # Common Tests
       - name: Lib - Test
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -248,7 +226,7 @@
       # Codecov
       - name: Upload coverage reports to Codecov
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
+        uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
         with:
```
(per-file header lost in the capture; a workflow step configuring AWS credentials)

```diff
@@ -38,7 +38,7 @@ jobs:
         pip install boto3

     - name: Configure AWS Credentials -- DEV
-      uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
+      uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
       with:
         aws-region: ${{ env.AWS_REGION_DEV }}
         role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
```
(per-file header lost in the capture; the UI container build workflow, per the prowler-ui image references)

```diff
@@ -30,7 +30,6 @@ env:
   # Container Registries
   PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
   PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
-  NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1

 jobs:
   repository-check:
@@ -77,17 +76,16 @@
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

       - name: Build and push container image (latest)
         # Comment the following line for testing
         if: github.event_name == 'push'
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           context: ${{ env.WORKING_DIRECTORY }}
           build-args: |
             NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
-            NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
           # Set push: false for testing
           push: true
           tags: |
@@ -98,12 +96,11 @@

       - name: Build and push container image (release)
         if: github.event_name == 'release'
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           context: ${{ env.WORKING_DIRECTORY }}
           build-args: |
             NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
-            NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
           push: true
           tags: |
             ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
```
4 changed lines: .github/workflows/ui-codeql.yml (vendored)

```diff
@@ -48,12 +48,12 @@ jobs:

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
+      uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
       with:
         languages: ${{ matrix.language }}
         config-file: ./.github/codeql/ui-codeql-config.yml

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
+      uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
       with:
         category: "/language:${{matrix.language}}"
```
98 changed lines: .github/workflows/ui-e2e-tests.yml (vendored), file removed

```diff
@@ -1,98 +0,0 @@
-name: UI - E2E Tests
-
-on:
-  pull_request:
-    branches:
-      - master
-      - "v5.*"
-    paths:
-      - '.github/workflows/ui-e2e-tests.yml'
-      - 'ui/**'
-
-jobs:
-  e2e-tests:
-    if: github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
-    env:
-      AUTH_SECRET: 'fallback-ci-secret-for-testing'
-      AUTH_TRUST_HOST: true
-      NEXTAUTH_URL: 'http://localhost:3000'
-      NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - name: Start API services
-        run: |
-          # Override docker-compose image tag to use latest instead of stable
-          # This overrides any PROWLER_API_VERSION set in .env file
-          export PROWLER_API_VERSION=latest
-          echo "Using PROWLER_API_VERSION=${PROWLER_API_VERSION}"
-          docker compose up -d api worker worker-beat
-      - name: Wait for API to be ready
-        run: |
-          echo "Waiting for prowler-api..."
-          timeout=150 # 5 minutes max
-          elapsed=0
-          while [ $elapsed -lt $timeout ]; do
-            if curl -s ${NEXT_PUBLIC_API_BASE_URL}/docs >/dev/null 2>&1; then
-              echo "Prowler API is ready!"
-              exit 0
-            fi
-            echo "Waiting for prowler-api... (${elapsed}s elapsed)"
-            sleep 5
-            elapsed=$((elapsed + 5))
-          done
-          echo "Timeout waiting for prowler-api to start"
-          exit 1
-      - name: Load database fixtures for E2E tests
-        run: |
-          docker compose exec -T api sh -c '
-            echo "Loading all fixtures from api/fixtures/dev/..."
-            for fixture in api/fixtures/dev/*.json; do
-              if [ -f "$fixture" ]; then
-                echo "Loading $fixture"
-                poetry run python manage.py loaddata "$fixture" --database admin
-              fi
-            done
-            echo "All database fixtures loaded successfully!"
-          '
-      - name: Setup Node.js environment
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
-        with:
-          node-version: '20.x'
-          cache: 'npm'
-          cache-dependency-path: './ui/package-lock.json'
-      - name: Install UI dependencies
-        working-directory: ./ui
-        run: npm ci
-      - name: Build UI application
-        working-directory: ./ui
-        run: npm run build
-      - name: Cache Playwright browsers
-        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
-        id: playwright-cache
-        with:
-          path: ~/.cache/ms-playwright
-          key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
-          restore-keys: |
-            ${{ runner.os }}-playwright-
-      - name: Install Playwright browsers
-        working-directory: ./ui
-        if: steps.playwright-cache.outputs.cache-hit != 'true'
-        run: npm run test:e2e:install
-      - name: Run E2E tests
-        working-directory: ./ui
-        run: npm run test:e2e
-      - name: Upload test reports
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
-        if: failure()
-        with:
-          name: playwright-report
-          path: ui/playwright-report/
-          retention-days: 30
-      - name: Cleanup services
-        if: always()
-        run: |
-          echo "Shutting down services..."
-          docker compose down -v || true
-          echo "Cleanup completed"
```
9 changed lines: .github/workflows/ui-pull-request.yml (vendored)

```diff
@@ -34,26 +34,23 @@ jobs:
         uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: ${{ matrix.node-version }}
-          cache: 'npm'
-          cache-dependency-path: './ui/package-lock.json'
       - name: Install dependencies
         working-directory: ./ui
-        run: npm ci
+        run: npm install
       - name: Run Healthcheck
         working-directory: ./ui
         run: npm run healthcheck
       - name: Build the application
         working-directory: ./ui
         run: npm run build

   test-container-build:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
       - name: Build Container
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
         with:
           context: ${{ env.UI_WORKING_DIR }}
           # Always build using `prod` target
```
10 changed lines: .gitignore (vendored)

```diff
@@ -44,16 +44,6 @@ junit-reports/

 # Cursor files
 .cursorignore
 .cursor/

-# RooCode files
-.roo/
-.rooignore
-.roomodes
-
-# Cline files
-.cline/
-.clineignore
-
 # Terraform
 .terraform*
```
(per-file header lost in the capture; .pre-commit-config.yaml, per the `repos:` hunk context)

```diff
@@ -115,8 +115,7 @@ repos:
   - id: safety
     name: safety
     description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
-    # TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
-    entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745'
+    entry: bash -c 'safety check --ignore 70612,66963,74429'
     language: system

   - id: vulture
```
@@ -1,4 +1,4 @@
FROM python:3.12.11-slim-bookworm AS build
FROM python:3.12.10-slim-bookworm AS build

LABEL maintainer="https://github.com/prowler-cloud/prowler"
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
@@ -6,8 +6,7 @@ LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
ARG POWERSHELL_VERSION=7.5.0

# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
    wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
    && rm -rf /var/lib/apt/lists/*

# Install PowerShell
@@ -47,6 +46,10 @@ ENV PATH="${HOME}/.local/bin:${PATH}"
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry

# By default poetry does not compile Python source files to bytecode during installation.
# This speeds up the installation process, but the first execution may take a little more
# time because Python then compiles source files to bytecode automatically. If you want to
# compile source files to bytecode during installation, you can use the --compile option
RUN poetry install --compile && \
    rm -rf ~/.cache/pip
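For context, poetry's `--compile` flag is roughly equivalent to byte-compiling the installed packages yourself with Python's `compileall` after the install step. A minimal sketch of that equivalence (the path lookup is illustrative, not taken from this image):

```python
# Byte-compile installed packages ahead of time, as poetry's --compile does,
# so the first container run doesn't pay the .pyc compilation cost.
import compileall
import sysconfig

site_packages = sysconfig.get_paths()["purelib"]  # assumed install location
compileall.compile_dir(site_packages, quiet=1, workers=0)  # workers=0 = all CPUs
```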
30 README.md
@@ -3,7 +3,7 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler</i></b> is the Open Cloud Security platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
<b><i>Prowler Open Source</i></b> is as dynamic and adaptable as the environment it secures. It is trusted by industry leaders to uphold the highest standards in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</a></b>
@@ -86,27 +86,21 @@ prowler dashboard

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 567 | 82 | 36 | 10 |
| GCP | 79 | 13 | 10 | 3 |
| Azure | 142 | 18 | 11 | 3 |
| Kubernetes | 83 | 7 | 5 | 7 |
| GitHub | 16 | 2 | 1 | 0 |
| M365 | 69 | 7 | 3 | 2 |
| AWS | 564 | 82 | 34 | 10 |
| GCP | 79 | 13 | 7 | 3 |
| Azure | 140 | 18 | 8 | 3 |
| Kubernetes | 83 | 7 | 4 | 7 |
| GitHub | 3 | 2 | 1 | 0 |
| M365 | 44 | 2 | 2 | 0 |
| NHN (Unofficial) | 6 | 2 | 1 | 0 |

> [!Note]
> The numbers in the table are updated periodically.

> [!Tip]
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).

> [!Note]
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories: `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
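Scripting those list commands is straightforward; a quick sketch (the flags are the ones documented above, the subprocess wrapper itself is just an example):

```python
# Enumerate Prowler's available checks, services, frameworks, and categories
# for one provider by shelling out to the CLI documented above.
import subprocess

for flag in ("--list-checks", "--list-services", "--list-compliance", "--list-categories"):
    result = subprocess.run(
        ["prowler", "aws", flag], capture_output=True, text=True, check=True
    )
    print(result.stdout)
```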
# 💻 Installation

## Prowler App

Installing Prowler App
Prowler App offers flexible installation methods tailored to various environments:

> For detailed instructions on using Prowler App, refer to the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
@@ -136,14 +130,6 @@ If your workstation's architecture is incompatible, you can resolve this by:

> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.

### Common Issues with Docker Pull Installation

> [!Note]
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.

You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.

### From GitHub

**Requirements**
168 api/.gitignore vendored Normal file
@@ -0,0 +1,168 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.pyc
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
/_data/

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
*.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/

# VSCode
.vscode/
91 api/.pre-commit-config.yaml Normal file
@@ -0,0 +1,91 @@
repos:
  ## GENERAL
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.6.0
    hooks:
      - id: check-merge-conflict
      - id: check-yaml
        args: ["--unsafe"]
      - id: check-json
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: no-commit-to-branch
      - id: pretty-format-json
        args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
        exclude: 'src/backend/api/fixtures/dev/.*\.json$'

  ## TOML
  - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
    rev: v2.13.0
    hooks:
      - id: pretty-format-toml
        args: [--autofix]
        files: pyproject.toml

  ## BASH
  - repo: https://github.com/koalaman/shellcheck-precommit
    rev: v0.10.0
    hooks:
      - id: shellcheck
        exclude: contrib
  ## PYTHON
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.5.0
    hooks:
      # Run the linter.
      - id: ruff
        args: [ --fix ]
      # Run the formatter.
      - id: ruff-format

  - repo: https://github.com/python-poetry/poetry
    rev: 1.8.0
    hooks:
      - id: poetry-check
        args: ["--directory=src"]
      - id: poetry-lock
        args: ["--no-update", "--directory=src"]

  - repo: https://github.com/hadolint/hadolint
    rev: v2.13.0-beta
    hooks:
      - id: hadolint
        args: ["--ignore=DL3013", "Dockerfile"]

  - repo: local
    hooks:
      - id: pylint
        name: pylint
        entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
        language: system
        files: '.*\.py'

      - id: trufflehog
        name: TruffleHog
        description: Detect secrets in your data.
        entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
        # For running trufflehog in docker, use the following entry instead:
        # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
        language: system
        stages: ["commit", "push"]

      - id: bandit
        name: bandit
        description: "Bandit is a tool for finding common security issues in Python code"
        entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
        language: system
        files: '.*\.py'

      - id: safety
        name: safety
        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
        entry: bash -c 'poetry run safety check --ignore 70612,66963,74429'
        language: system

      - id: vulture
        name: vulture
        description: "Vulture finds unused code in Python programs."
        entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
        language: system
        files: '.*\.py'
186 api/CHANGELOG.md
@@ -2,143 +2,14 @@

All notable changes to the **Prowler API** are documented in this file.

## [1.11.0] (Prowler 5.10.0)

### Added
- Github provider support [(#8271)](https://github.com/prowler-cloud/prowler/pull/8271)
- Integration with Amazon S3, enabling storage and retrieval of scan data via S3 buckets [(#8056)](https://github.com/prowler-cloud/prowler/pull/8056)

### Fixed
- Avoid sending errors to Sentry in M365 provider when user authentication fails [(#8420)](https://github.com/prowler-cloud/prowler/pull/8420)

---

## [1.10.2] (Prowler v5.9.2)

### Changed
- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)

---

## [v1.10.1] (Prowler v5.9.1)

### Fixed
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)

---

## [v1.10.0] (Prowler v5.9.0)

### Added
- SSO with SAML support [(#8175)](https://github.com/prowler-cloud/prowler/pull/8175)
- `GET /resources/metadata`, `GET /resources/metadata/latest` and `GET /resources/latest` to expose resource metadata and latest scan results [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)

### Changed
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow muting findings.
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
- `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)

### Fixed
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)

### Security
- Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)

---

## [v1.9.1] (Prowler v5.8.1)

### Added
- Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)

### Changed
- Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)

### Fixed
- Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
- Invitation email comparison is now case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)

### Removed
- Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)

---

## [v1.9.0] (Prowler v5.8.0)

### Added
- Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
- `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
- Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)

### Changed
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
- Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)

### Fixed
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)

---

## [v1.8.5] (Prowler v5.7.5)

### Fixed
- Normalize provider UID to ensure safe and unique export directory paths [(#8007)](https://github.com/prowler-cloud/prowler/pull/8007)
- Blank resource types in `/metadata` endpoints [(#8027)](https://github.com/prowler-cloud/prowler/pull/8027)

---

## [v1.8.4] (Prowler v5.7.4)

### Removed
- Reverted RLS transaction handling and DB custom backend [(#7994)](https://github.com/prowler-cloud/prowler/pull/7994)

---

## [v1.8.3] (Prowler v5.7.3)

### Added
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935)

### Changed
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)

### Fixed
- Transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916)
- Reverted the `get_with_retry` change to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932)

---

## [v1.8.2] (Prowler v5.7.2)

### Fixed
- Task lookup to use task_kwargs instead of task_args for scan report resolution [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
- Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
- Connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
- Race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876)
- Error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890)

---

## [v1.8.1] (Prowler v5.7.1)

### Fixed
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)

---

## [v1.8.0] (Prowler v5.7.0)

### Added
- Huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- Improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- Queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- New endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743)
- Export support for Prowler ThreatScore in M365 [(#7783)](https://github.com/prowler-cloud/prowler/pull/7783)
- Added huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added new queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added new endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743).
- Added export support for Prowler ThreatScore in M365 [(#7783)](https://github.com/prowler-cloud/prowler/pull/7783)

---

@@ -146,9 +17,9 @@ All notable changes to the **Prowler API** are documented in this file.

### Added

- M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563)
- `compliance/` folder and ZIP-export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
- API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
- Added M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563).
- Added a `compliance/` folder and ZIP-export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
- Added a new API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).

---

@@ -156,42 +27,47 @@ All notable changes to the **Prowler API** are documented in this file.

### Added

- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167)
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289)
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333)
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378)
- Missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318)
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289).
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333).
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378).
- Added missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318).

---

## [v1.5.4] (Prowler v5.4.4)

### Fixed
- Bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466)
- Fixed a bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466).

---

## [v1.5.3] (Prowler v5.4.3)

### Fixed
- Duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401)
- Environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423)
- Added duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401).
- Added environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423).

---

## [v1.5.2] (Prowler v5.4.2)

### Changed
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349)
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).

---

## [v1.5.1] (Prowler v5.4.1)

### Fixed
- Handle response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183)
- Race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172)
- Handle exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283)
- Added a handled response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183).
- Fixed a race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172).
- Handled exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283).

### Added

- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).

---

@@ -199,20 +75,20 @@ All notable changes to the **Prowler API** are documented in this file.

### Added
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
- API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878)
- Add API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878).
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)

### Changed
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019)
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019).

---

## [v1.4.0] (Prowler v5.3.0)

### Changed
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700)
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800)
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863)
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869)
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).

---

@@ -6,19 +6,7 @@ ARG POWERSHELL_VERSION=7.5.0
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}

# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
    wget \
    libicu72 \
    gcc \
    g++ \
    make \
    libxml2-dev \
    libxmlsec1-dev \
    libxmlsec1-openssl \
    pkg-config \
    libtool \
    libxslt1-dev \
    python3-dev \
RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
    && rm -rf /var/lib/apt/lists/*

# Install PowerShell
@@ -44,25 +32,23 @@ USER prowler

WORKDIR /home/prowler

# Ensure output directory exists
RUN mkdir -p /tmp/prowler_api_output

COPY pyproject.toml ./

RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry

COPY src/backend/ ./backend/

ENV PATH="/home/prowler/.local/bin:$PATH"

# Add `--no-root` to avoid installing the current project as a package
RUN poetry install --no-root && \
    rm -rf ~/.cache/pip

RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"

COPY src/backend/ ./backend/
COPY docker-entrypoint.sh ./docker-entrypoint.sh

RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"

WORKDIR /home/prowler/backend

# Development image

@@ -257,7 +257,7 @@ cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```

> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`
> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`

## Run tests
125 api/docker-compose.yml Normal file
@@ -0,0 +1,125 @@
services:
  api:
    build:
      dockerfile: Dockerfile
    image: prowler-api
    env_file:
      - path: ./.env
        required: false
    ports:
      - "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
    profiles:
      - prod
    depends_on:
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "prod"

  api-dev:
    build:
      dockerfile: Dockerfile
      target: dev
    image: prowler-api-dev
    environment:
      - DJANGO_SETTINGS_MODULE=config.django.devel
      - DJANGO_LOGGING_FORMATTER=human_readable
    env_file:
      - path: ./.env
        required: false
    ports:
      - "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
    volumes:
      - "./src/backend:/home/prowler/backend"
      - "./pyproject.toml:/home/prowler/pyproject.toml"
    profiles:
      - dev
    depends_on:
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "dev"

  postgres:
    image: postgres:16.3-alpine
    ports:
      - "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
    hostname: "postgres-db"
    volumes:
      - ./_data/postgres:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
      - POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
      - POSTGRES_DB=${POSTGRES_DB:-prowler_db}
    env_file:
      - path: ./.env
        required: false
    healthcheck:
      test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
      interval: 5s
      timeout: 5s
      retries: 5

  valkey:
    image: valkey/valkey:7-alpine3.19
    ports:
      - "${VALKEY_PORT:-6379}:6379"
    hostname: "valkey"
    volumes:
      - ./_data/valkey:/data
    env_file:
      - path: ./.env
        required: false
    healthcheck:
      test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
      interval: 10s
      timeout: 5s
      retries: 3

  worker:
    build:
      dockerfile: Dockerfile
    image: prowler-worker
    environment:
      - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
    env_file:
      - path: ./.env
        required: false
    profiles:
      - dev
      - prod
    depends_on:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "worker"

  worker-beat:
    build:
      dockerfile: Dockerfile
    image: prowler-worker
    environment:
      - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
    env_file:
      - path: ./.env
        required: false
    profiles:
      - dev
      - prod
    depends_on:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "beat"
@@ -3,10 +3,6 @@

apply_migrations() {
  echo "Applying database migrations..."

  # Fix inconsistent migration history after adding sites app
  poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin

  poetry run python manage.py migrate --database admin
}

@@ -32,7 +28,7 @@ start_prod_server() {

start_worker() {
  echo "Starting the worker..."
  poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations -E --max-tasks-per-child 1
  poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill -E --max-tasks-per-child 1
}

start_worker_beat() {
1112 api/poetry.lock generated
File diff suppressed because it is too large
@@ -7,8 +7,8 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
  "celery[pytest] (>=5.4.0,<6.0.0)",
  "dj-rest-auth[with_social,jwt] (==7.0.1)",
  "django==5.1.10",
  "django-allauth[saml] (>=65.8.0,<66.0.0)",
  "django==5.1.8",
  "django-allauth==65.4.1",
  "django-celery-beat (>=2.7.0,<3.0.0)",
  "django-celery-results (>=2.5.1,<3.0.0)",
  "django-cors-headers==4.4.0",
@@ -23,14 +23,11 @@ dependencies = [
  "drf-spectacular==0.27.2",
  "drf-spectacular-jsonapi==0.5.1",
  "gunicorn==23.0.0",
  "lxml==5.3.2",
  "prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.10",
  "prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.7",
  "psycopg2-binary==2.9.9",
  "pytest-celery[redis] (>=1.0.1,<2.0.0)",
  "sentry-sdk[django] (>=2.20.0,<3.0.0)",
  "uuid6==2024.7.10",
  "openai (>=1.82.0,<2.0.0)",
  "xmlsec==1.3.14"
  "uuid6==2024.7.10"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
@@ -38,7 +35,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.11.2"
version = "1.8.0"

[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -17,8 +17,6 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
    def pre_social_login(self, request, sociallogin):
        # Link existing accounts with the same email address
        email = sociallogin.account.extra_data.get("email")
        if sociallogin.provider.id == "saml":
            email = sociallogin.user.email
        if email:
            existing_user = self.get_user_by_email(email)
            if existing_user:
@@ -31,41 +29,33 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
        """
        with transaction.atomic(using=MainRouter.admin_db):
            user = super().save_user(request, sociallogin, form)
            provider = sociallogin.provider.id
            extra = sociallogin.account.extra_data

            if provider != "saml":
                # Handle other providers (e.g., GitHub, Google)
                user.save(using=MainRouter.admin_db)
                social_account_name = sociallogin.account.extra_data.get("name")
                if social_account_name:
                    user.name = social_account_name
                    user.save(using=MainRouter.admin_db)
                social_account_name = extra.get("name")
                if social_account_name:
                    user.name = social_account_name
                    user.save(using=MainRouter.admin_db)

                tenant = Tenant.objects.using(MainRouter.admin_db).create(
                    name=f"{user.email.split('@')[0]} default tenant"
                tenant = Tenant.objects.using(MainRouter.admin_db).create(
                    name=f"{user.email.split('@')[0]} default tenant"
                )
                with rls_transaction(str(tenant.id)):
                    Membership.objects.using(MainRouter.admin_db).create(
                        user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
                    )
                    role = Role.objects.using(MainRouter.admin_db).create(
                        name="admin",
                        tenant_id=tenant.id,
                        manage_users=True,
                        manage_account=True,
                        manage_billing=True,
                        manage_providers=True,
                        manage_integrations=True,
                        manage_scans=True,
                        unlimited_visibility=True,
                    )
                    UserRoleRelationship.objects.using(MainRouter.admin_db).create(
                        user=user,
                        role=role,
                        tenant_id=tenant.id,
                    )
                with rls_transaction(str(tenant.id)):
                    Membership.objects.using(MainRouter.admin_db).create(
                        user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
                    )
                    role = Role.objects.using(MainRouter.admin_db).create(
                        name="admin",
                        tenant_id=tenant.id,
                        manage_users=True,
                        manage_account=True,
                        manage_billing=True,
                        manage_providers=True,
                        manage_integrations=True,
                        manage_scans=True,
                        unlimited_visibility=True,
                    )
                    UserRoleRelationship.objects.using(MainRouter.admin_db).create(
                        user=user,
                        role=role,
                        tenant_id=tenant.id,
                    )
            else:
                request.session["saml_user_created"] = str(user.id)

        return user
@@ -109,6 +109,16 @@ class BaseTenantViewset(BaseViewSet):
            pass  # Tenant might not exist, handle gracefully

    def initial(self, request, *args, **kwargs):
        if (
            request.resolver_match.url_name != "tenant-detail"
            and request.method != "DELETE"
        ):
            user_id = str(request.user.id)

            with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
                return super().initial(request, *args, **kwargs)

        # TODO: DRY this when we have time
        if request.auth is None:
            raise NotAuthenticated

@@ -116,8 +126,8 @@ class BaseTenantViewset(BaseViewSet):
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        user_id = str(request.user.id)
        with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)
@@ -190,16 +190,10 @@ def generate_compliance_overview_template(prowler_compliance: dict):
                total_checks = len(requirement.Checks)
                checks_dict = {check: None for check in requirement.Checks}

                req_status_val = "MANUAL" if total_checks == 0 else "PASS"

                # Build requirement dictionary
                requirement_dict = {
                    "name": requirement.Name or requirement.Id,
                    "description": requirement.Description,
                    "tactics": getattr(requirement, "Tactics", []),
                    "subtechniques": getattr(requirement, "SubTechniques", []),
                    "platforms": getattr(requirement, "Platforms", []),
                    "technique_url": getattr(requirement, "TechniqueURL", ""),
                    "attributes": [
                        dict(attribute) for attribute in requirement.Attributes
                    ],
@@ -210,18 +204,20 @@ def generate_compliance_overview_template(prowler_compliance: dict):
                        "manual": 0,
                        "total": total_checks,
                    },
                    "status": req_status_val,
                    "status": "PASS",
                }

                # Update requirements status counts for the framework
                if req_status_val == "MANUAL":
                # Update requirements status
                if total_checks == 0:
                    requirements_status["manual"] += 1
                elif req_status_val == "PASS":
                    requirements_status["passed"] += 1

                # Add requirement to compliance requirements
                compliance_requirements[requirement.Id] = requirement_dict

            # Calculate pending requirements
            pending_requirements = total_requirements - requirements_status["manual"]
            requirements_status["passed"] = pending_requirements

            # Build compliance dictionary
            compliance_dict = {
                "framework": compliance_data.Framework,
@@ -1,4 +1,3 @@
import re
import secrets
import uuid
from contextlib import contextmanager
@@ -153,51 +152,6 @@ def delete_related_daily_task(provider_id: str):
    PeriodicTask.objects.filter(name=task_name).delete()


def create_objects_in_batches(
    tenant_id: str, model, objects: list, batch_size: int = 500
):
    """
    Bulk-create model instances in repeated, per-tenant RLS transactions.

    All chunks execute in their own transaction, so no single transaction
    grows too large.

    Args:
        tenant_id (str): UUID string of the tenant under which to set RLS.
        model: Django model class whose `.objects.bulk_create()` will be called.
        objects (list): List of model instances (unsaved) to bulk-create.
        batch_size (int): Maximum number of objects per bulk_create call.
    """
    total = len(objects)
    for i in range(0, total, batch_size):
        chunk = objects[i : i + batch_size]
        with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
            model.objects.bulk_create(chunk, batch_size)


def update_objects_in_batches(
    tenant_id: str, model, objects: list, fields: list, batch_size: int = 500
):
    """
    Bulk-update model instances in repeated, per-tenant RLS transactions.

    All chunks execute in their own transaction, so no single transaction
    grows too large.

    Args:
        tenant_id (str): UUID string of the tenant under which to set RLS.
        model: Django model class whose `.objects.bulk_update()` will be called.
        objects (list): List of model instances (saved) to bulk-update.
        fields (list): List of field names to update.
        batch_size (int): Maximum number of objects per bulk_update call.
    """
    total = len(objects)
    for start in range(0, total, batch_size):
        chunk = objects[start : start + batch_size]
        with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
            model.objects.bulk_update(chunk, fields, batch_size)
# Postgres Enums
|
||||
register_adapter(enum_class, enum_adapter)
|
||||
|
||||
|
||||
def _should_create_index_on_partition(
|
||||
partition_name: str, all_partitions: bool = False
|
||||
) -> bool:
|
||||
"""
|
||||
Determine if we should create an index on this partition.
|
||||
|
||||
Args:
|
||||
partition_name: The name of the partition (e.g., "findings_2025_aug", "findings_default")
|
||||
all_partitions: If True, create on all partitions. If False, only current/future partitions.
|
||||
|
||||
Returns:
|
||||
bool: True if index should be created on this partition, False otherwise.
|
||||
"""
|
||||
if all_partitions:
|
||||
return True
|
||||
|
||||
# Extract date from partition name if it follows the pattern
|
||||
# Partition names look like: findings_2025_aug, findings_2025_jul, etc.
|
||||
date_pattern = r"(\d{4})_([a-z]{3})$"
|
||||
match = re.search(date_pattern, partition_name)
|
||||
|
||||
if not match:
|
||||
# If we can't parse the date, include it to be safe (e.g., default partition)
|
||||
return True
|
||||
|
||||
try:
|
||||
year_str, month_abbr = match.groups()
|
||||
year = int(year_str)
|
||||
|
||||
# Map month abbreviations to numbers
|
||||
month_map = {
|
||||
"jan": 1,
|
||||
"feb": 2,
|
||||
"mar": 3,
|
||||
"apr": 4,
|
||||
"may": 5,
|
||||
"jun": 6,
|
||||
"jul": 7,
|
||||
"aug": 8,
|
||||
"sep": 9,
|
||||
"oct": 10,
|
||||
"nov": 11,
|
||||
"dec": 12,
|
||||
}
|
||||
|
||||
month = month_map.get(month_abbr.lower())
|
||||
if month is None:
|
||||
# Unknown month abbreviation, include it to be safe
|
||||
return True
|
||||
|
||||
partition_date = datetime(year, month, 1, tzinfo=timezone.utc)
|
||||
|
||||
# Get current month start
|
||||
now = datetime.now(timezone.utc)
|
||||
current_month_start = now.replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
|
||||
# Include current month and future partitions
|
||||
return partition_date >= current_month_start
|
||||
|
||||
except (ValueError, TypeError):
|
||||
# If date parsing fails, include it to be safe
|
||||
return True
|
|
||||
apps, # noqa: F841
|
||||
schema_editor,
|
||||
@@ -347,39 +235,16 @@ def create_index_on_partitions(
|
||||
columns: str,
|
||||
method: str = "BTREE",
|
||||
where: str = "",
|
||||
all_partitions: bool = True,
|
||||
):
|
||||
"""
|
||||
Create an index on existing partitions of `parent_table`.
|
||||
Create an index on every existing partition of `parent_table`.
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: A short name for the index (will be prefixed per-partition).
|
||||
columns: The parenthesized column list, e.g. "tenant_id, scan_id, status".
|
||||
method: The index method—BTREE, GIN, etc. Defaults to BTREE.
|
||||
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
|
||||
all_partitions: Whether to create indexes on all partitions or just current/future ones.
|
||||
Defaults to False (current/future only) to avoid maintenance overhead
|
||||
on old partitions where the index may not be needed.
|
||||
|
||||
Examples:
|
||||
# Create index only on current and future partitions (recommended for new indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="new_performance_idx",
|
||||
columns="tenant_id, status, severity",
|
||||
all_partitions=False # Default behavior
|
||||
)
|
||||
|
||||
# Create index on all partitions (use when migrating existing critical indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="critical_existing_idx",
|
||||
columns="tenant_id, scan_id",
|
||||
all_partitions=True
|
||||
)
|
||||
method: The index method—BTREE, GIN, etc. Defaults to BTREE.
|
||||
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
@@ -394,14 +259,13 @@ def create_index_on_partitions(
|
||||
|
||||
where_sql = f" WHERE {where}" if where else ""
|
||||
for partition in partitions:
|
||||
if _should_create_index_on_partition(partition, all_partitions):
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
|
||||
f"ON {partition} USING {method} ({columns})"
|
||||
f"{where_sql};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
|
||||
f"ON {partition} USING {method} ({columns})"
|
||||
f"{where_sql};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
|
||||
@@ -415,7 +279,7 @@ def drop_index_on_partitions(
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: The same short name used when creating them.
|
||||
index_name: The same short name used when creating them.
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
@@ -552,15 +416,3 @@ class IntegrationTypeEnum(EnumType):
|
||||
class IntegrationTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("integration_type", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Processor type
|
||||
|
||||
|
||||
class ProcessorTypeEnum(EnumType):
|
||||
enum_type_name = "processor_type"
|
||||
|
||||
|
||||
class ProcessorTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("processor_type", *args, **kwargs)
|
||||
|
||||
@@ -3,7 +3,7 @@ from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken


class ModelValidationError(ValidationError):
@@ -32,36 +32,6 @@ class InvitationTokenExpiredException(APIException):
    default_code = "token_expired"


# Task Management Exceptions (non-HTTP)
class TaskManagementError(Exception):
    """Base exception for task management errors."""

    def __init__(self, task=None):
        self.task = task
        super().__init__()


class TaskFailedException(TaskManagementError):
    """Raised when a task has failed."""


class TaskNotFoundException(TaskManagementError):
    """Raised when a task is not found."""


class TaskInProgressException(TaskManagementError):
    """Raised when a task is running but there's no related Task object to return."""

    def __init__(self, task_result=None):
        self.task_result = task_result
        super().__init__()


# Provider connection errors
class ProviderConnectionError(Exception):
    """Base exception for provider connection errors."""


def custom_exception_handler(exc, context):
    if isinstance(exc, django_validation_error):
        if hasattr(exc, "error_dict"):
@@ -69,30 +39,7 @@ def custom_exception_handler(exc, context):
        else:
            exc = ValidationError(detail=exc.messages[0], code=exc.code)
    elif isinstance(exc, (TokenError, InvalidToken)):
        if (
            hasattr(exc, "detail")
            and isinstance(exc.detail, dict)
            and "messages" in exc.detail
        ):
            exc.detail["messages"] = [
                message_item["message"] for message_item in exc.detail["messages"]
            ]
        exc.detail["messages"] = [
            message_item["message"] for message_item in exc.detail["messages"]
        ]
    return exception_handler(exc, context)


class ConflictException(APIException):
    status_code = status.HTTP_409_CONFLICT
    default_detail = "A conflict occurred. The resource already exists."
    default_code = "conflict"

    def __init__(self, detail=None, code=None, pointer=None):
        error_detail = {
            "detail": detail or self.default_detail,
            "status": self.status_code,
            "code": self.default_code,
        }

        if pointer:
            error_detail["source"] = {"pointer": pointer}

        super().__init__(detail=[error_detail])
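A sketch of raising this from a view, in the spirit of the changelog entry where `POST /schedules/daily` returns `409 CONFLICT` when the schedule already exists; the view function and the `has_daily_schedule` attribute are hypothetical:

```python
from api.exceptions import ConflictException  # assumed import path


def create_daily_schedule(provider):
    # Reject duplicates with a JSON:API-style 409 payload instead of a 500.
    if provider.has_daily_schedule:  # hypothetical attribute
        raise ConflictException(
            detail="Daily schedule already exists for this provider.",
            pointer="/data/attributes/provider_id",
        )
```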
@@ -1,6 +1,5 @@
from datetime import date, datetime, timedelta, timezone

from dateutil.parser import parse
from django.conf import settings
from django.db.models import Q
from django_filters.rest_framework import (
@@ -23,13 +22,12 @@ from api.db_utils import (
    StatusEnumField,
)
from api.models import (
    ComplianceRequirementOverview,
    ComplianceOverview,
    Finding,
    Integration,
    Invitation,
    Membership,
    PermissionChoices,
    Processor,
    Provider,
    ProviderGroup,
    ProviderSecret,
@@ -340,8 +338,6 @@ class ResourceFilter(ProviderRelationshipFilterSet):
    tags = CharFilter(method="filter_tag")
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    scan = UUIDFilter(field_name="provider__scan", lookup_expr="exact")
    scan__in = UUIDInFilter(field_name="provider__scan", lookup_expr="in")

    class Meta:
        model = Resource
@@ -356,82 +352,6 @@ class ResourceFilter(ProviderRelationshipFilterSet):
            "updated_at": ["gte", "lte"],
        }

    def filter_queryset(self, queryset):
        if not (self.data.get("scan") or self.data.get("scan__in")) and not (
            self.data.get("updated_at")
            or self.data.get("updated_at__date")
            or self.data.get("updated_at__gte")
            or self.data.get("updated_at__lte")
        ):
            raise ValidationError(
                [
                    {
                        "detail": "At least one date filter is required: filter[updated_at], filter[updated_at.gte], "
                        "or filter[updated_at.lte].",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/updated_at"},
                        "code": "required",
                    }
                ]
            )

        gte_date = (
            parse(self.data.get("updated_at__gte")).date()
            if self.data.get("updated_at__gte")
            else datetime.now(timezone.utc).date()
        )
        lte_date = (
            parse(self.data.get("updated_at__lte")).date()
            if self.data.get("updated_at__lte")
            else datetime.now(timezone.utc).date()
        )

        if abs(lte_date - gte_date) > timedelta(
            days=settings.FINDINGS_MAX_DAYS_IN_RANGE
        ):
            raise ValidationError(
                [
                    {
                        "detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/updated_at"},
                        "code": "invalid",
                    }
                ]
            )

        return super().filter_queryset(queryset)
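From a client's perspective, the validation above means resource queries must carry a bounded `updated_at` window (or a scan filter). A hedged sketch of a conforming request; the host, token, and JSON:API filter spelling are assumptions inferred from the error payloads above:

```python
import requests

resp = requests.get(
    "https://api.example.com/api/v1/resources",  # hypothetical host and path
    params={
        "filter[updated_at.gte]": "2025-08-01",
        "filter[updated_at.lte]": "2025-08-07",  # window must stay within FINDINGS_MAX_DAYS_IN_RANGE
    },
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
print(resp.json())
```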
    def filter_tag_key(self, queryset, name, value):
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))

    def filter_tag_value(self, queryset, name, value):
        return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))

    def filter_tag(self, queryset, name, value):
        # We won't know what the user wants to filter on just based on the value,
        # and we don't want to build special filtering logic for every possible
        # provider tag spec, so we'll just do a full text search
        return queryset.filter(tags__text_search=value)


class LatestResourceFilter(ProviderRelationshipFilterSet):
    tag_key = CharFilter(method="filter_tag_key")
    tag_value = CharFilter(method="filter_tag_value")
    tag = CharFilter(method="filter_tag")
    tags = CharFilter(method="filter_tag")

    class Meta:
        model = Resource
        fields = {
            "provider": ["exact", "in"],
            "uid": ["exact", "icontains"],
            "name": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
            "service": ["exact", "icontains", "in"],
            "type": ["exact", "icontains", "in"],
        }

    def filter_tag_key(self, queryset, name, value):
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
@@ -717,11 +637,12 @@ class RoleFilter(FilterSet):

class ComplianceOverviewFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    scan_id = UUIDFilter(field_name="scan_id")
    region = CharFilter(field_name="region")
    provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
    provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
    scan_id = UUIDFilter(field_name="scan__id")

    class Meta:
        model = ComplianceRequirementOverview
        model = ComplianceOverview
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "compliance_id": ["exact", "icontains"],
@@ -784,12 +705,3 @@ class IntegrationFilter(FilterSet):
        fields = {
            "inserted_at": ["date", "gte", "lte"],
        }


class ProcessorFilter(FilterSet):
    processor_type = ChoiceFilter(choices=Processor.ProcessorChoices.choices)
    processor_type__in = ChoiceInFilter(
        choices=Processor.ProcessorChoices.choices,
        field_name="processor_type",
        lookup_expr="in",
    )
@@ -3,7 +3,7 @@
        "model": "api.user",
        "pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
        "fields": {
            "password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devie Prowlerson",
            "email": "dev@prowler.com",
@@ -16,7 +16,7 @@
        "model": "api.user",
        "pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
        "fields": {
            "password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devietoo Prowlerson",
            "email": "dev2@prowler.com",
@@ -24,18 +24,5 @@
            "is_active": true,
            "date_joined": "2024-09-18T09:04:20.850Z"
        }
    },
    {
        "model": "api.user",
        "pk": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
        "fields": {
            "password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
            "last_login": null,
            "name": "E2E Test User",
            "email": "e2e@prowler.com",
            "company_name": "Prowler E2E Tests",
            "is_active": true,
            "date_joined": "2024-01-01T00:00:00.850Z"
        }
    }
]

@@ -46,24 +46,5 @@
            "role": "member",
            "date_joined": "2024-09-19T11:03:59.712Z"
        }
    },
    {
        "model": "api.tenant",
        "pk": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
        "fields": {
            "inserted_at": "2024-01-01T00:00:00Z",
            "updated_at": "2024-01-01T00:00:00Z",
            "name": "E2E Test Tenant"
        }
    },
    {
        "model": "api.membership",
        "pk": "9b1a2c3d-4e5f-6789-abc1-23456789def0",
        "fields": {
            "user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
            "tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
            "role": "owner",
            "date_joined": "2024-01-01T00:00:00.000Z"
        }
    }
]

@@ -149,32 +149,5 @@
            "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
            "inserted_at": "2024-11-20T15:36:14.302Z"
        }
    },
    {
        "model": "api.role",
        "pk": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
        "fields": {
            "tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
            "name": "e2e_admin",
            "manage_users": true,
            "manage_account": true,
            "manage_billing": true,
            "manage_providers": true,
            "manage_integrations": true,
            "manage_scans": true,
            "unlimited_visibility": true,
            "inserted_at": "2024-01-01T00:00:00.000Z",
            "updated_at": "2024-01-01T00:00:00.000Z"
        }
    },
    {
        "model": "api.userrolerelationship",
        "pk": "f1e2d3c4-b5a6-9876-5432-10fedcba9876",
        "fields": {
            "tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
            "role": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
            "user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
            "inserted_at": "2024-01-01T00:00:00.000Z"
        }
    }
]

@@ -1,80 +0,0 @@
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.db.migrations.recorder import MigrationRecorder


def table_exists(table_name):
    with connection.cursor() as cursor:
        cursor.execute(
            """
            SELECT EXISTS (
                SELECT 1 FROM information_schema.tables
                WHERE table_name = %s
            )
            """,
            [table_name],
        )
        return cursor.fetchone()[0]


class Command(BaseCommand):
    help = "Fix migration inconsistency between socialaccount and sites"

    def add_arguments(self, parser):
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help="Specifies the database to operate on.",
        )

    def handle(self, *args, **options):
        db = options["database"]
        connection = connections[db]
        recorder = MigrationRecorder(connection)

        applied = set(recorder.applied_migrations())

        has_social = ("socialaccount", "0001_initial") in applied

        with connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_name = 'django_site'
                );
                """
            )
            site_table_exists = cursor.fetchone()[0]

        if has_social and not site_table_exists:
            self.stdout.write(
                f"Detected inconsistency in '{db}'. Creating 'django_site' table manually..."
            )

            with transaction.atomic(using=db):
                with connection.schema_editor() as schema_editor:
                    schema_editor.create_model(Site)

                recorder.record_applied("sites", "0001_initial")
                recorder.record_applied("sites", "0002_alter_domain_unique")

            self.stdout.write(
                "Fixed: 'django_site' table created and migrations registered."
            )

        # Ensure the relationship table also exists
        if not table_exists("socialaccount_socialapp_sites"):
            self.stdout.write(
                "Detected missing 'socialaccount_socialapp_sites' table. Creating manually..."
            )
            with connection.schema_editor() as schema_editor:
                from allauth.socialaccount.models import SocialApp

                schema_editor.create_model(
                    SocialApp._meta.get_field("sites").remote_field.through
                )
            self.stdout.write(
                "Fixed: 'socialaccount_socialapp_sites' table created."
            )
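Note: a command like the one above can also be invoked programmatically. A sketch follows; the command module name is a guess, since the diff omits the file path under management/commands/.

# Hypothetical invocation; "fix_social_sites" is an assumed command name.
from django.core.management import call_command

call_command("fix_social_sites", database="default")
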
@@ -1,29 +0,0 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0023_resources_lookup_optimization"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_uid_inserted_idx",
                columns="tenant_id, uid, inserted_at DESC",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_uid_inserted_idx",
            ),
        )
    ]
@@ -1,17 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0024_findings_uid_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "uid", "-inserted_at"],
                name="find_tenant_uid_inserted_idx",
            ),
        ),
    ]
@@ -1,14 +0,0 @@
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0025_findings_uid_index_parent"),
    ]

    operations = [
        migrations.RunSQL(
            "ALTER TYPE provider_secret_type ADD VALUE IF NOT EXISTS 'service_account';",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
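Note: create_index_on_partitions and drop_index_on_partitions live in api.db_utils and are not shown in this diff. A rough sketch of what such a helper could look like follows (an assumption, not the actual implementation): enumerate the child partitions of the parent table and build the index concurrently on each one. CREATE INDEX CONCURRENTLY cannot run inside a transaction, which is why these migrations declare atomic = False.

# Assumed shape of a partition-index helper matching the call sites above.
def create_index_on_partitions(
    apps, schema_editor, parent_table, index_name, columns, method="BTREE"
):
    # List the child partitions of the partitioned parent table.
    with schema_editor.connection.cursor() as cursor:
        cursor.execute(
            "SELECT inhrelid::regclass::text FROM pg_inherits "
            "WHERE inhparent = %s::regclass",
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]

    # Build the index on each partition without taking long write locks.
    for partition in partitions:
        schema_editor.execute(
            f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition}_{index_name} "
            f"ON {partition} USING {method} ({columns})"
        )
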
@@ -1,124 +0,0 @@
# Generated by Django 5.1.8 on 2025-05-21 11:37

import uuid

import django.db.models.deletion
from django.db import migrations, models

import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0026_provider_secret_gcp_service_account"),
    ]

    operations = [
        migrations.CreateModel(
            name="ComplianceRequirementOverview",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("compliance_id", models.TextField(blank=False)),
                ("framework", models.TextField(blank=False)),
                ("version", models.TextField(blank=True)),
                ("description", models.TextField(blank=True)),
                ("region", models.TextField(blank=False)),
                ("requirement_id", models.TextField(blank=False)),
                (
                    "requirement_status",
                    api.db_utils.StatusEnumField(
                        choices=[
                            ("FAIL", "Fail"),
                            ("PASS", "Pass"),
                            ("MANUAL", "Manual"),
                        ]
                    ),
                ),
                ("passed_checks", models.IntegerField(default=0)),
                ("failed_checks", models.IntegerField(default=0)),
                ("total_checks", models.IntegerField(default=0)),
                (
                    "scan",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="compliance_requirements_overviews",
                        related_query_name="compliance_requirements_overview",
                        to="api.scan",
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "compliance_requirements_overviews",
                "abstract": False,
                "indexes": [
                    models.Index(
                        fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"
                    ),
                    models.Index(
                        fields=["tenant_id", "scan_id", "compliance_id"],
                        name="cro_scan_comp_idx",
                    ),
                    models.Index(
                        fields=["tenant_id", "scan_id", "compliance_id", "region"],
                        name="cro_scan_comp_reg_idx",
                    ),
                    models.Index(
                        fields=[
                            "tenant_id",
                            "scan_id",
                            "compliance_id",
                            "requirement_id",
                        ],
                        name="cro_scan_comp_req_idx",
                    ),
                    models.Index(
                        fields=[
                            "tenant_id",
                            "scan_id",
                            "compliance_id",
                            "requirement_id",
                            "region",
                        ],
                        name="cro_scan_comp_req_reg_idx",
                    ),
                ],
                "constraints": [
                    models.UniqueConstraint(
                        fields=(
                            "tenant_id",
                            "scan_id",
                            "compliance_id",
                            "requirement_id",
                            "region",
                        ),
                        name="unique_tenant_compliance_requirement_overview",
                    )
                ],
            },
        ),
        migrations.AddConstraint(
            model_name="ComplianceRequirementOverview",
            constraint=RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_compliancerequirementoverview",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
@@ -1,29 +0,0 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0027_compliance_requirement_overviews"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_check_idx",
                columns="tenant_id, scan_id, check_id",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="find_tenant_scan_check_idx",
            ),
        )
    ]
@@ -1,17 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0028_findings_check_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="finding",
            index=models.Index(
                fields=["tenant_id", "scan_id", "check_id"],
                name="find_tenant_scan_check_idx",
            ),
        ),
    ]
@@ -1,107 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-12 12:45

import uuid

import django.core.validators
import django.db.models.deletion
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0029_findings_check_index_parent"),
    ]

    operations = [
        migrations.CreateModel(
            name="LighthouseConfiguration",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "name",
                    models.CharField(
                        help_text="Name of the configuration",
                        max_length=100,
                        validators=[django.core.validators.MinLengthValidator(3)],
                    ),
                ),
                (
                    "api_key",
                    models.BinaryField(
                        help_text="Encrypted API key for the LLM service"
                    ),
                ),
                (
                    "model",
                    models.CharField(
                        choices=[
                            ("gpt-4o-2024-11-20", "GPT-4o v2024-11-20"),
                            ("gpt-4o-2024-08-06", "GPT-4o v2024-08-06"),
                            ("gpt-4o-2024-05-13", "GPT-4o v2024-05-13"),
                            ("gpt-4o", "GPT-4o Default"),
                            ("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
                            ("gpt-4o-mini", "GPT-4o Mini Default"),
                        ],
                        default="gpt-4o-2024-08-06",
                        help_text="Must be one of the supported model names",
                        max_length=50,
                    ),
                ),
                (
                    "temperature",
                    models.FloatField(default=0, help_text="Must be between 0 and 1"),
                ),
                (
                    "max_tokens",
                    models.IntegerField(
                        default=4000, help_text="Must be between 500 and 5000"
                    ),
                ),
                (
                    "business_context",
                    models.TextField(
                        blank=True,
                        default="",
                        help_text="Additional business context for this AI model configuration",
                    ),
                ),
                ("is_active", models.BooleanField(default=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "lighthouse_configurations",
                "abstract": False,
                "constraints": [
                    models.UniqueConstraint(
                        fields=("tenant_id",),
                        name="unique_lighthouse_config_per_tenant",
                    ),
                ],
            },
        ),
        migrations.AddConstraint(
            model_name="lighthouseconfiguration",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_lighthouseconfiguration",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
@@ -1,24 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-23 10:04

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0030_lighthouseconfiguration"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        migrations.AlterField(
            model_name="scan",
            name="scheduler_task",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="django_celery_beat.periodictask",
            ),
        ),
    ]
@@ -1,150 +0,0 @@
# Generated by Django 5.1.10 on 2025-07-02 15:47

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models

import api.db_utils
import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0031_scan_disable_on_cascade_periodic_tasks"),
    ]

    operations = [
        migrations.AlterField(
            model_name="integration",
            name="integration_type",
            field=api.db_utils.IntegrationTypeEnumField(
                choices=[
                    ("amazon_s3", "Amazon S3"),
                    ("aws_security_hub", "AWS Security Hub"),
                    ("jira", "JIRA"),
                    ("slack", "Slack"),
                ]
            ),
        ),
        migrations.CreateModel(
            name="SAMLToken",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("expires_at", models.DateTimeField(editable=False)),
                ("token", models.JSONField(unique=True)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "db_table": "saml_tokens",
            },
        ),
        migrations.CreateModel(
            name="SAMLConfiguration",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                (
                    "email_domain",
                    models.CharField(
                        help_text="Email domain used to identify the tenant, e.g. prowlerdemo.com",
                        max_length=254,
                        unique=True,
                    ),
                ),
                (
                    "metadata_xml",
                    models.TextField(
                        help_text="Raw IdP metadata XML to configure SingleSignOnService, certificates, etc."
                    ),
                ),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "saml_configurations",
            },
        ),
        migrations.AddConstraint(
            model_name="samlconfiguration",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_samlconfiguration",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="samlconfiguration",
            constraint=models.UniqueConstraint(
                fields=("tenant",), name="unique_samlconfig_per_tenant"
            ),
        ),
        migrations.CreateModel(
            name="SAMLDomainIndex",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("email_domain", models.CharField(max_length=254, unique=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "saml_domain_index",
            },
        ),
        migrations.AddConstraint(
            model_name="samldomainindex",
            constraint=models.UniqueConstraint(
                fields=("email_domain", "tenant"),
                name="unique_resources_by_email_domain",
            ),
        ),
        migrations.AddConstraint(
            model_name="samldomainindex",
            constraint=api.rls.BaseSecurityConstraint(
                name="statements_on_samldomainindex",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
@@ -1,34 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46

from functools import partial

from django.db import migrations

from api.db_utils import PostgresEnumMigration, ProcessorTypeEnum, register_enum
from api.models import Processor

ProcessorTypeEnumMigration = PostgresEnumMigration(
    enum_name="processor_type",
    enum_values=tuple(
        processor_type[0] for processor_type in Processor.ProcessorChoices.choices
    ),
)


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0032_saml"),
    ]

    operations = [
        migrations.RunPython(
            ProcessorTypeEnumMigration.create_enum_type,
            reverse_code=ProcessorTypeEnumMigration.drop_enum_type,
        ),
        migrations.RunPython(
            partial(register_enum, enum_class=ProcessorTypeEnum),
            reverse_code=migrations.RunPython.noop,
        ),
    ]
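Note: PostgresEnumMigration is another api.db_utils helper whose implementation lies outside this diff. Judging only from the call sites above, it plausibly wraps CREATE TYPE / DROP TYPE so the enum lifecycle can run through RunPython, roughly like this sketch (assumed shape, not the real code):

class PostgresEnumMigrationSketch:
    def __init__(self, enum_name, enum_values):
        self.enum_name = enum_name
        self.enum_values = enum_values

    def create_enum_type(self, apps, schema_editor):
        # RunPython passes (apps, schema_editor) to the bound method.
        values = ", ".join(f"'{value}'" for value in self.enum_values)
        schema_editor.execute(f"CREATE TYPE {self.enum_name} AS ENUM ({values})")

    def drop_enum_type(self, apps, schema_editor):
        schema_editor.execute(f"DROP TYPE {self.enum_name}")
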
@@ -1,88 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-26 13:04

import uuid

import django.db.models.deletion
from django.db import migrations, models

import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0033_processors_enum"),
    ]

    operations = [
        migrations.CreateModel(
            name="Processor",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "processor_type",
                    api.db_utils.ProcessorTypeEnumField(
                        choices=[("mutelist", "Mutelist")]
                    ),
                ),
                ("configuration", models.JSONField(default=dict)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "processors",
                "abstract": False,
                "indexes": [
                    models.Index(
                        fields=["tenant_id", "id"], name="processor_tenant_id_idx"
                    ),
                    models.Index(
                        fields=["tenant_id", "processor_type"],
                        name="processor_tenant_type_idx",
                    ),
                ],
            },
        ),
        migrations.AddConstraint(
            model_name="processor",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "processor_type"),
                name="unique_processor_types_tenant",
            ),
        ),
        migrations.AddConstraint(
            model_name="processor",
            constraint=RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_processor",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="scan",
            name="processor",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="scans",
                related_query_name="scan",
                to="api.processor",
            ),
        ),
    ]
@@ -1,22 +0,0 @@
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0034_processors"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="muted_reason",
            field=models.TextField(
                blank=True,
                max_length=500,
                null=True,
                validators=[django.core.validators.MinLengthValidator(3)],
            ),
        ),
    ]
@@ -1,30 +0,0 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0035_finding_muted_reason"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_finding_idx",
                columns="tenant_id, finding_id",
                method="BTREE",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_finding_idx",
            ),
        ),
    ]
@@ -1,17 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0036_rfm_tenant_finding_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="resourcefindingmapping",
            index=models.Index(
                fields=["tenant_id", "finding_id"],
                name="rfm_tenant_finding_idx",
            ),
        ),
    ]
@@ -1,15 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0037_rfm_tenant_finding_index_parent"),
    ]

    operations = [
        migrations.AddField(
            model_name="resource",
            name="failed_findings_count",
            field=models.IntegerField(default=0),
        )
    ]
@@ -1,20 +0,0 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0038_resource_failed_findings_count"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="resource",
            index=models.Index(
                fields=["tenant_id", "-failed_findings_count", "id"],
                name="resources_failed_findings_idx",
            ),
        ),
    ]
@@ -1,30 +0,0 @@
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0039_resource_resources_failed_findings_idx"),
    ]

    operations = [
        migrations.RunPython(
            partial(
                create_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_resource_idx",
                columns="tenant_id, resource_id",
                method="BTREE",
            ),
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_resource_idx",
            ),
        ),
    ]
@@ -1,17 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0040_rfm_tenant_resource_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="resourcefindingmapping",
            index=models.Index(
                fields=["tenant_id", "resource_id"],
                name="rfm_tenant_resource_idx",
            ),
        ),
    ]
@@ -1,23 +0,0 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0041_rfm_tenant_resource_parent_partitions"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="scan",
            index=models.Index(
                condition=models.Q(("state", "completed")),
                fields=["tenant_id", "provider_id", "-inserted_at"],
                include=("id",),
                name="scans_prov_ins_desc_idx",
            ),
        ),
    ]
@@ -1,33 +0,0 @@
# Generated by Django 5.1.7 on 2025-07-09 14:44

from django.db import migrations

import api.db_utils


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0042_scan_scans_prov_ins_desc_idx"),
    ]

    operations = [
        migrations.AlterField(
            model_name="provider",
            name="provider",
            field=api.db_utils.ProviderEnumField(
                choices=[
                    ("aws", "AWS"),
                    ("azure", "Azure"),
                    ("gcp", "GCP"),
                    ("kubernetes", "Kubernetes"),
                    ("m365", "M365"),
                    ("github", "GitHub"),
                ],
                default="aws",
            ),
        ),
        migrations.RunSQL(
            "ALTER TYPE provider ADD VALUE IF NOT EXISTS 'github';",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
@@ -1,19 +0,0 @@
# Generated by Django 5.1.10 on 2025-07-17 11:52

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0043_github_provider"),
    ]

    operations = [
        migrations.AddConstraint(
            model_name="integration",
            constraint=models.UniqueConstraint(
                fields=("configuration", "tenant"),
                name="unique_configuration_per_tenant",
            ),
        ),
    ]
@@ -1,17 +0,0 @@
# Generated by Django 5.1.10 on 2025-07-21 16:08

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0044_integration_unique_configuration_per_tenant"),
    ]

    operations = [
        migrations.AlterField(
            model_name="scan",
            name="output_location",
            field=models.CharField(blank=True, max_length=4096, null=True),
        ),
    ]
@@ -1,21 +1,13 @@
import json
import logging
import re
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta, timezone
from uuid import UUID, uuid4

from allauth.socialaccount.models import SocialApp
from config.custom_logging import BackendLogger
from config.settings.social_login import SOCIALACCOUNT_PROVIDERS
from cryptography.fernet import Fernet, InvalidToken
from cryptography.fernet import Fernet
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVector, SearchVectorField
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.validators import MinLengthValidator
from django.db import models
from django.db.models import Q
@@ -27,14 +19,12 @@ from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from uuid6 import uuid7

from api.db_router import MainRouter
from api.db_utils import (
    CustomUserManager,
    FindingDeltaEnumField,
    IntegrationTypeEnumField,
    InvitationStateEnumField,
    MemberRoleEnumField,
    ProcessorTypeEnumField,
    ProviderEnumField,
    ProviderSecretTypeEnumField,
    ScanTriggerEnumField,
@@ -59,8 +49,6 @@ fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())
# Convert Prowler Severity enum to Django TextChoices
SeverityChoices = enum_to_choices(Severity)

logger = logging.getLogger(BackendLogger.API)


class StatusChoices(models.TextChoices):
    """
@@ -205,7 +193,6 @@ class Provider(RowLevelSecurityProtectedModel):
        GCP = "gcp", _("GCP")
        KUBERNETES = "kubernetes", _("Kubernetes")
        M365 = "m365", _("M365")
        GITHUB = "github", _("GitHub")

    @staticmethod
    def validate_aws_uid(value):
@@ -255,7 +242,7 @@ class Provider(RowLevelSecurityProtectedModel):
    @staticmethod
    def validate_kubernetes_uid(value):
        if not re.match(
            r"^[a-zA-Z0-9][a-zA-Z0-9._@:\/-]{1,250}$",
            r"^[a-z0-9][A-Za-z0-9_.:\/-]{1,250}$",
            value,
        ):
            raise ModelValidationError(
@@ -266,16 +253,6 @@ class Provider(RowLevelSecurityProtectedModel):
                pointer="/data/attributes/uid",
            )

    @staticmethod
    def validate_github_uid(value):
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9-]{0,38}$", value):
            raise ModelValidationError(
                detail="GitHub provider ID must be a valid GitHub username or organization name (1-39 characters, "
                "starting with alphanumeric, containing only alphanumeric characters and hyphens).",
                code="github-uid",
                pointer="/data/attributes/uid",
            )

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
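Note: the GitHub UID rule removed above is easy to sanity-check in isolation. The regex below is copied verbatim from validate_github_uid; the sample names are invented.

import re

GITHUB_UID_RE = re.compile(r"^[a-zA-Z0-9][a-zA-Z0-9-]{0,38}$")

assert GITHUB_UID_RE.match("prowler-cloud")            # valid org/user name
assert not GITHUB_UID_RE.match("-starts-with-hyphen")  # must start alphanumeric
assert not GITHUB_UID_RE.match("a" * 40)               # too long: 40 > 39 characters
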
@@ -421,6 +398,20 @@ class Scan(RowLevelSecurityProtectedModel):
    name = models.CharField(
        blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
    )
    provider = models.ForeignKey(
        Provider,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
    )
    task = models.ForeignKey(
        Task,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
        null=True,
        blank=True,
    )
    trigger = ScanTriggerEnumField(
        choices=TriggerChoices.choices,
    )
@@ -436,31 +427,11 @@ class Scan(RowLevelSecurityProtectedModel):
    completed_at = models.DateTimeField(null=True, blank=True)
    next_scan_at = models.DateTimeField(null=True, blank=True)
    scheduler_task = models.ForeignKey(
        PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
    )
    output_location = models.CharField(blank=True, null=True, max_length=4096)
    provider = models.ForeignKey(
        Provider,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
    )
    task = models.ForeignKey(
        Task,
        on_delete=models.CASCADE,
        related_name="scans",
        related_query_name="scan",
        null=True,
        blank=True,
    )
    processor = models.ForeignKey(
        "Processor",
        on_delete=models.SET_NULL,
        related_name="scans",
        related_query_name="scan",
        null=True,
        blank=True,
        PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
    )
    output_location = models.CharField(blank=True, null=True, max_length=200)

    # TODO: mutelist foreign key

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "scans"
@@ -487,13 +458,6 @@ class Scan(RowLevelSecurityProtectedModel):
            condition=Q(state=StateChoices.COMPLETED),
            name="scans_prov_state_ins_desc_idx",
        ),
        # TODO This might replace `scans_prov_state_ins_desc_idx` completely. Review usage
        models.Index(
            fields=["tenant_id", "provider_id", "-inserted_at"],
            condition=Q(state=StateChoices.COMPLETED),
            include=["id"],
            name="scans_prov_ins_desc_idx",
        ),
    ]

    class JSONAPIMeta:
@@ -579,8 +543,6 @@ class Resource(RowLevelSecurityProtectedModel):
    details = models.TextField(blank=True, null=True)
    partition = models.TextField(blank=True, null=True)

    failed_findings_count = models.IntegerField(default=0)

    # Relationships
    tags = models.ManyToManyField(
        ResourceTag,
@@ -627,10 +589,6 @@ class Resource(RowLevelSecurityProtectedModel):
            fields=["tenant_id", "provider_id"],
            name="resources_tenant_provider_idx",
        ),
        models.Index(
            fields=["tenant_id", "-failed_findings_count", "id"],
            name="resources_failed_findings_idx",
        ),
    ]

    constraints = [
@@ -729,9 +687,6 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
    check_id = models.CharField(max_length=100, blank=False, null=False)
    check_metadata = models.JSONField(default=dict, null=False)
    muted = models.BooleanField(default=False, null=False)
    muted_reason = models.TextField(
        blank=True, null=True, validators=[MinLengthValidator(3)], max_length=500
    )
    compliance = models.JSONField(default=dict, null=True, blank=True)

    # Denormalize resource data for performance
@@ -800,17 +755,9 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
            condition=Q(delta="new"),
            name="find_delta_new_idx",
        ),
        models.Index(
            fields=["tenant_id", "uid", "-inserted_at"],
            name="find_tenant_uid_inserted_idx",
        ),
        GinIndex(fields=["resource_services"], name="gin_find_service_idx"),
        GinIndex(fields=["resource_regions"], name="gin_find_region_idx"),
        GinIndex(fields=["resource_types"], name="gin_find_rtype_idx"),
        models.Index(
            fields=["tenant_id", "scan_id", "check_id"],
            name="find_tenant_scan_check_idx",
        ),
    ]

    class JSONAPIMeta:
@@ -873,16 +820,6 @@ class ResourceFindingMapping(PostgresPartitionedModel, RowLevelSecurityProtected
    # - tenant_id
    # - id

    indexes = [
        models.Index(
            fields=["tenant_id", "finding_id"],
            name="rfm_tenant_finding_idx",
        ),
        models.Index(
            fields=["tenant_id", "resource_id"],
            name="rfm_tenant_resource_idx",
        ),
    ]
    constraints = [
        models.UniqueConstraint(
            fields=("tenant_id", "resource_id", "finding_id"),
@@ -909,7 +846,6 @@ class ProviderSecret(RowLevelSecurityProtectedModel):
    class TypeChoices(models.TextChoices):
        STATIC = "static", _("Key-value pairs")
        ROLE = "role", _("Role assumption")
        SERVICE_ACCOUNT = "service_account", _("GCP Service Account Key")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
@@ -987,11 +923,6 @@ class Invitation(RowLevelSecurityProtectedModel):
        null=True,
    )

    def save(self, *args, **kwargs):
        if self.email:
            self.email = self.email.strip().lower()
        super().save(*args, **kwargs)

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "invitations"

@@ -1207,78 +1138,6 @@ class ComplianceOverview(RowLevelSecurityProtectedModel):
        resource_name = "compliance-overviews"


class ComplianceRequirementOverview(RowLevelSecurityProtectedModel):
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    compliance_id = models.TextField(blank=False)
    framework = models.TextField(blank=False)
    version = models.TextField(blank=True)
    description = models.TextField(blank=True)
    region = models.TextField(blank=False)

    requirement_id = models.TextField(blank=False)
    requirement_status = StatusEnumField(choices=StatusChoices)
    passed_checks = models.IntegerField(default=0)
    failed_checks = models.IntegerField(default=0)
    total_checks = models.IntegerField(default=0)

    scan = models.ForeignKey(
        Scan,
        on_delete=models.CASCADE,
        related_name="compliance_requirements_overviews",
        related_query_name="compliance_requirements_overview",
    )

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "compliance_requirements_overviews"

        constraints = [
            models.UniqueConstraint(
                fields=(
                    "tenant_id",
                    "scan_id",
                    "compliance_id",
                    "requirement_id",
                    "region",
                ),
                name="unique_tenant_compliance_requirement_overview",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "DELETE"],
            ),
        ]
        indexes = [
            models.Index(fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"),
            models.Index(
                fields=["tenant_id", "scan_id", "compliance_id"],
                name="cro_scan_comp_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "compliance_id", "region"],
                name="cro_scan_comp_reg_idx",
            ),
            models.Index(
                fields=["tenant_id", "scan_id", "compliance_id", "requirement_id"],
                name="cro_scan_comp_req_idx",
            ),
            models.Index(
                fields=[
                    "tenant_id",
                    "scan_id",
                    "compliance_id",
                    "requirement_id",
                    "region",
                ],
                name="cro_scan_comp_req_reg_idx",
            ),
        ]

    class JSONAPIMeta:
        resource_name = "compliance-requirements-overviews"


class ScanSummary(RowLevelSecurityProtectedModel):
    objects = ActiveProviderManager()
    all_objects = models.Manager()
@@ -1346,7 +1205,8 @@ class ScanSummary(RowLevelSecurityProtectedModel):

class Integration(RowLevelSecurityProtectedModel):
    class IntegrationChoices(models.TextChoices):
        AMAZON_S3 = "amazon_s3", _("Amazon S3")
        S3 = "amazon_s3", _("Amazon S3")
        SAML = "saml", _("SAML")
        AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
        JIRA = "jira", _("JIRA")
        SLACK = "slack", _("Slack")
@@ -1372,10 +1232,6 @@ class Integration(RowLevelSecurityProtectedModel):
        db_table = "integrations"

        constraints = [
            models.UniqueConstraint(
                fields=("configuration", "tenant"),
                name="unique_configuration_per_tenant",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
@@ -1424,273 +1280,6 @@ class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
    ]


class SAMLToken(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    expires_at = models.DateTimeField(editable=False)
    token = models.JSONField(unique=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE)

    class Meta:
        db_table = "saml_tokens"

    def save(self, *args, **kwargs):
        if not self.expires_at:
            self.expires_at = datetime.now(timezone.utc) + timedelta(seconds=15)
        super().save(*args, **kwargs)

    def is_expired(self) -> bool:
        return datetime.now(timezone.utc) >= self.expires_at

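Note: the SAMLToken lifecycle above is short-lived by design: save() stamps expires_at fifteen seconds ahead, and is_expired() compares against it. Illustrative usage follows; the user object and payload are invented for the example.

# Illustrative SAMLToken lifecycle (some_user and the payload are placeholders).
token = SAMLToken(user=some_user, token={"access": "opaque-value"})
token.save()                   # save() stamps expires_at = now + 15 seconds
assert not token.is_expired()  # immediately after saving, the token is still valid
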
class SAMLDomainIndex(models.Model):
    """
    Public index of SAML domains. No RLS. Used for fast lookup in SAML login flow.
    """

    email_domain = models.CharField(max_length=254, unique=True)
    tenant = models.ForeignKey("Tenant", on_delete=models.CASCADE)

    class Meta:
        db_table = "saml_domain_index"

        constraints = [
            models.UniqueConstraint(
                fields=("email_domain", "tenant"),
                name="unique_resources_by_email_domain",
            ),
            BaseSecurityConstraint(
                name="statements_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]


class SAMLConfiguration(RowLevelSecurityProtectedModel):
    """
    Stores per-tenant SAML settings, including email domain and IdP metadata.
    Automatically syncs to a SocialApp instance on save.

    Note:
        This model exists to provide a tenant-aware abstraction over SAML configuration.
        It supports row-level security, custom validation, and metadata parsing, enabling
        Prowler to expose a clean API and admin interface for managing SAML integrations.

        Although Django Allauth uses the SocialApp model to store provider configuration,
        it is not designed for multi-tenant use. SocialApp lacks support for tenant scoping,
        email domain mapping, and structured metadata handling.

        By managing SAMLConfiguration separately, we ensure:
        - Strong isolation between tenants via RLS.
        - Ownership of raw IdP metadata and its validation.
        - An explicit link between SAML config and business-level identifiers (e.g. email domain).
        - Programmatic transformation into the SocialApp format used by Allauth.

        In short, this model acts as a secure and user-friendly layer over Allauth's lower-level primitives.
    """

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    email_domain = models.CharField(
        max_length=254,
        unique=True,
        help_text="Email domain used to identify the tenant, e.g. prowlerdemo.com",
    )
    metadata_xml = models.TextField(
        help_text="Raw IdP metadata XML to configure SingleSignOnService, certificates, etc."
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class JSONAPIMeta:
        resource_name = "saml-configurations"

    class Meta:
        db_table = "saml_configurations"

        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
            # 1 config per tenant
            models.UniqueConstraint(
                fields=["tenant"],
                name="unique_samlconfig_per_tenant",
            ),
        ]

    def clean(self, old_email_domain=None, is_create=False):
        # Domain must not contain @
        if "@" in self.email_domain:
            raise ValidationError({"email_domain": "Domain must not contain @"})

        # Enforce at most one config per tenant
        qs = SAMLConfiguration.objects.filter(tenant=self.tenant)
        # Exclude ourselves in case of update
        if self.pk:
            qs = qs.exclude(pk=self.pk)
        if qs.exists():
            raise ValidationError(
                {"tenant": "A SAML configuration already exists for this tenant."}
            )

        # The email domain must be unique in the entire system
        qs = SAMLConfiguration.objects.using(MainRouter.admin_db).filter(
            email_domain__iexact=self.email_domain
        )
        if qs.exists() and old_email_domain != self.email_domain:
            raise ValidationError(
                {"tenant": "There is a problem with your email domain."}
            )

        # The entityID must be unique in the system
        idp_settings = self._parsed_metadata
        entity_id = idp_settings.get("entity_id")

        if entity_id:
            # Find any SocialApp with this entityID
            q = SocialApp.objects.filter(provider="saml", provider_id=entity_id)

            # If updating, exclude our own SocialApp from the check
            if not is_create:
                q = q.exclude(client_id=old_email_domain)
            else:
                q = q.exclude(client_id=self.email_domain)

            if q.exists():
                raise ValidationError(
                    {"metadata_xml": "There is a problem with your metadata."}
                )

    def save(self, *args, **kwargs):
        self.email_domain = self.email_domain.strip().lower()
        is_create = not SAMLConfiguration.objects.filter(pk=self.pk).exists()

        if not is_create:
            old = SAMLConfiguration.objects.get(pk=self.pk)
            old_email_domain = old.email_domain
            old_metadata_xml = old.metadata_xml
        else:
            old_email_domain = None
            old_metadata_xml = None

        self._parsed_metadata = self._parse_metadata()
        self.clean(old_email_domain, is_create)
        super().save(*args, **kwargs)

        if is_create or (
            old_email_domain != self.email_domain
            or old_metadata_xml != self.metadata_xml
        ):
            self._sync_social_app(old_email_domain)

        # Sync the public index
        if not is_create and old_email_domain and old_email_domain != self.email_domain:
            SAMLDomainIndex.objects.filter(email_domain=old_email_domain).delete()

        # Create/update the new domain index
        SAMLDomainIndex.objects.update_or_create(
            email_domain=self.email_domain, defaults={"tenant": self.tenant}
        )

    def delete(self, *args, **kwargs):
        super().delete(*args, **kwargs)

        SocialApp.objects.filter(provider="saml", client_id=self.email_domain).delete()
        SAMLDomainIndex.objects.filter(email_domain=self.email_domain).delete()

    def _parse_metadata(self):
        """
        Parse the raw IdP metadata XML and extract:
        - entity_id
        - sso_url
        - slo_url (may be None)
        - x509cert (required)
        """
        ns = {
            "md": "urn:oasis:names:tc:SAML:2.0:metadata",
            "ds": "http://www.w3.org/2000/09/xmldsig#",
        }
        try:
            root = ET.fromstring(self.metadata_xml)
        except ET.ParseError as e:
            raise ValidationError({"metadata_xml": f"Invalid XML: {e}"})

        # Entity ID
        entity_id = root.attrib.get("entityID")
        if not entity_id:
            raise ValidationError({"metadata_xml": "Missing entityID in metadata."})

        # SSO endpoint (must exist)
        sso = root.find(".//md:IDPSSODescriptor/md:SingleSignOnService", ns)
        if sso is None or "Location" not in sso.attrib:
            raise ValidationError(
                {"metadata_xml": "Missing SingleSignOnService in metadata."}
            )
        sso_url = sso.attrib["Location"]

        # SLO endpoint (optional)
        slo = root.find(".//md:IDPSSODescriptor/md:SingleLogoutService", ns)
        slo_url = slo.attrib.get("Location") if slo is not None else None

        # X.509 certificate (required)
        cert = root.find(
            './/md:KeyDescriptor[@use="signing"]/ds:KeyInfo/ds:X509Data/ds:X509Certificate',
            ns,
        )
        if cert is None or not cert.text or not cert.text.strip():
            raise ValidationError(
                {
                    "metadata_xml": 'Metadata must include a <ds:X509Certificate> under <KeyDescriptor use="signing">.'
                }
            )
        x509cert = cert.text.strip()

        return {
            "entity_id": entity_id,
            "sso_url": sso_url,
            "slo_url": slo_url,
            "x509cert": x509cert,
        }

    def _sync_social_app(self, previous_email_domain=None):
        """
        Create or update the corresponding SocialApp based on email_domain.
        If the domain changed, update the matching SocialApp.
        """
        settings_dict = SOCIALACCOUNT_PROVIDERS["saml"].copy()
        settings_dict["idp"] = self._parsed_metadata

        current_site = Site.objects.get(id=settings.SITE_ID)

        social_app_qs = SocialApp.objects.filter(
            provider="saml", client_id=previous_email_domain or self.email_domain
        )

        client_id = self.email_domain[:191]
        name = f"SAML-{self.email_domain}"[:40]

        if social_app_qs.exists():
            social_app = social_app_qs.first()
            social_app.client_id = client_id
            social_app.name = name
            social_app.settings = settings_dict
            social_app.provider_id = self._parsed_metadata["entity_id"]
            social_app.save()
            social_app.sites.set([current_site])
        else:
            social_app = SocialApp.objects.create(
                provider="saml",
                client_id=client_id,
                name=name,
                settings=settings_dict,
                provider_id=self._parsed_metadata["entity_id"],
            )
            social_app.sites.set([current_site])

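Note: the smallest IdP metadata document that satisfies the _parse_metadata helper above looks roughly like the sketch below; every value (entity ID, URLs, certificate body) is fabricated for illustration. Parsing it yields entity_id, sso_url, and x509cert, with slo_url left as None since no SingleLogoutService is present.

# Fabricated IdP metadata accepted by _parse_metadata above.
METADATA_XML = """\
<md:EntityDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
                     xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
                     entityID="https://idp.example.com/metadata">
  <md:IDPSSODescriptor>
    <md:KeyDescriptor use="signing">
      <ds:KeyInfo>
        <ds:X509Data>
          <ds:X509Certificate>MIIC...fake-certificate-body...</ds:X509Certificate>
        </ds:X509Data>
      </ds:KeyInfo>
    </md:KeyDescriptor>
    <md:SingleSignOnService Location="https://idp.example.com/sso"/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>
"""
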
class ResourceScanSummary(RowLevelSecurityProtectedModel):
    scan_id = models.UUIDField(default=uuid7, db_index=True)
    resource_id = models.UUIDField(default=uuid4, db_index=True)
@@ -1738,169 +1327,3 @@ class ResourceScanSummary(RowLevelSecurityProtectedModel):
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]


class LighthouseConfiguration(RowLevelSecurityProtectedModel):
    """
    Stores configuration and API keys for LLM services.
    """

    class ModelChoices(models.TextChoices):
        GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", _("GPT-4o v2024-11-20")
        GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", _("GPT-4o v2024-08-06")
        GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", _("GPT-4o v2024-05-13")
        GPT_4O = "gpt-4o", _("GPT-4o Default")
        GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", _("GPT-4o Mini v2024-07-18")
        GPT_4O_MINI = "gpt-4o-mini", _("GPT-4o Mini Default")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    name = models.CharField(
        max_length=100,
        validators=[MinLengthValidator(3)],
        blank=False,
        null=False,
        help_text="Name of the configuration",
    )
    api_key = models.BinaryField(
        blank=False, null=False, help_text="Encrypted API key for the LLM service"
    )
    model = models.CharField(
        max_length=50,
        choices=ModelChoices.choices,
        blank=False,
        null=False,
        default=ModelChoices.GPT_4O_2024_08_06,
        help_text="Must be one of the supported model names",
    )
    temperature = models.FloatField(default=0, help_text="Must be between 0 and 1")
    max_tokens = models.IntegerField(
        default=4000, help_text="Must be between 500 and 5000"
    )
    business_context = models.TextField(
        blank=True,
        null=False,
        default="",
        help_text="Additional business context for this AI model configuration",
    )
    is_active = models.BooleanField(default=True)

    def __str__(self):
        return self.name

    def clean(self):
        super().clean()

        # Validate temperature
        if not 0 <= self.temperature <= 1:
            raise ModelValidationError(
                detail="Temperature must be between 0 and 1",
                code="invalid_temperature",
                pointer="/data/attributes/temperature",
            )

        # Validate max_tokens
        if not 500 <= self.max_tokens <= 5000:
            raise ModelValidationError(
                detail="Max tokens must be between 500 and 5000",
                code="invalid_max_tokens",
                pointer="/data/attributes/max_tokens",
            )

    @property
    def api_key_decoded(self):
        """Return the decrypted API key, or None if unavailable or invalid."""
        if not self.api_key:
            return None

        try:
            decrypted_key = fernet.decrypt(bytes(self.api_key))
            return decrypted_key.decode()

        except InvalidToken:
            logger.warning("Invalid token while decrypting API key.")
        except Exception as e:
            logger.exception("Unexpected error while decrypting API key: %s", e)

    @api_key_decoded.setter
    def api_key_decoded(self, value):
        """Store the encrypted API key."""
        if not value:
            raise ModelValidationError(
                detail="API key is required",
                code="invalid_api_key",
                pointer="/data/attributes/api_key",
            )

        # Validate OpenAI API key format
        openai_key_pattern = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"
        if not re.match(openai_key_pattern, value):
            raise ModelValidationError(
                detail="Invalid OpenAI API key format.",
                code="invalid_api_key",
                pointer="/data/attributes/api_key",
            )
        self.api_key = fernet.encrypt(value.encode())

    def save(self, *args, **kwargs):
        self.full_clean()
        super().save(*args, **kwargs)

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "lighthouse_configurations"

        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
            # Only one Lighthouse configuration per tenant
            models.UniqueConstraint(
                fields=["tenant_id"], name="unique_lighthouse_config_per_tenant"
            ),
        ]

    class JSONAPIMeta:
        resource_name = "lighthouse-configurations"

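Note: the api_key_decoded property pair above is a standard Fernet round trip. The same flow in isolation looks like this; the key below is generated on the spot, whereas the model uses settings.SECRETS_ENCRYPTION_KEY.

# Stand-alone Fernet round trip mirroring api_key_decoded above.
from cryptography.fernet import Fernet

fernet = Fernet(Fernet.generate_key())

ciphertext = fernet.encrypt(b"sk-example-secret")  # what gets stored in api_key
plaintext = fernet.decrypt(ciphertext).decode()    # what the property returns
assert plaintext == "sk-example-secret"
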
class Processor(RowLevelSecurityProtectedModel):
    class ProcessorChoices(models.TextChoices):
        MUTELIST = "mutelist", _("Mutelist")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    processor_type = ProcessorTypeEnumField(choices=ProcessorChoices.choices)
    configuration = models.JSONField(default=dict)

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "processors"

        constraints = [
            models.UniqueConstraint(
                fields=("tenant_id", "processor_type"),
                name="unique_processor_types_tenant",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]
        indexes = [
            models.Index(
                fields=["tenant_id", "id"],
                name="processor_tenant_id_idx",
            ),
            models.Index(
                fields=["tenant_id", "processor_type"],
                name="processor_tenant_type_idx",
            ),
        ]

    class JSONAPIMeta:
        resource_name = "processors"

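The Processor model above allows at most one processor of each type per tenant (the unique_processor_types_tenant constraint), with the behavior itself stored as JSON in configuration. A hedged usage sketch; tenant_id is a placeholder, not a value taken from this diff:

# Fetch or create the tenant's single mutelist processor.
processor, created = Processor.objects.get_or_create(
    tenant_id=tenant_id,  # assumed: a known tenant UUID
    processor_type=Processor.ProcessorChoices.MUTELIST,
    defaults={"configuration": {"Mutelist": {"Accounts": {}}}},
)
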
@@ -1,4 +1,4 @@
-from drf_spectacular_jsonapi.schemas.pagination import JsonApiPageNumberPagination
+from rest_framework_json_api.pagination import JsonApiPageNumberPagination


class ComplianceOverviewPagination(JsonApiPageNumberPagination):

File diff suppressed because it is too large
@@ -11,7 +11,7 @@ def test_basic_authentication():
    client = APIClient()

    test_user = "test_email@prowler.com"
-    test_password = "Test_password@1"
+    test_password = "test_password"

    # Check that a 401 is returned when no basic authentication is provided
    no_auth_response = client.get(reverse("provider-list"))

@@ -108,7 +108,7 @@ def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fi
    user1_email = "user1@testing.com"
    user2_email = "user2@testing.com"

-    password = "Thisisapassword123@"
+    password = "thisisapassword123"

    user1_response = client.post(
        reverse("user-list"),

@@ -187,7 +187,7 @@ class TestTokenSwitchTenant:
        client = APIClient()

        test_user = "test_email@prowler.com"
-        test_password = "Test_password1@"
+        test_password = "test_password"

        # Check that we can create a new user without any kind of authentication
        user_creation_response = client.post(

@@ -17,7 +17,7 @@ def test_delete_provider_without_executing_task(
    client = APIClient()

    test_user = "test_email@prowler.com"
-    test_password = "Test_password1@"
+    test_password = "test_password"

    prowler_task = tasks_fixture[0]
    task_mock = Mock()

@@ -1,77 +0,0 @@
from unittest.mock import MagicMock, patch

import pytest
from allauth.socialaccount.models import SocialLogin
from django.contrib.auth import get_user_model

from api.adapters import ProwlerSocialAccountAdapter

User = get_user_model()


@pytest.mark.django_db
class TestProwlerSocialAccountAdapter:
    def test_get_user_by_email_returns_user(self, create_test_user):
        adapter = ProwlerSocialAccountAdapter()
        user = adapter.get_user_by_email(create_test_user.email)
        assert user == create_test_user

    def test_get_user_by_email_returns_none_for_unknown_email(self):
        adapter = ProwlerSocialAccountAdapter()
        assert adapter.get_user_by_email("notfound@example.com") is None

    def test_pre_social_login_links_existing_user(self, create_test_user, rf):
        adapter = ProwlerSocialAccountAdapter()

        sociallogin = MagicMock(spec=SocialLogin)
        sociallogin.account = MagicMock()
        sociallogin.provider = MagicMock()
        sociallogin.provider.id = "saml"
        sociallogin.account.extra_data = {}
        sociallogin.user = create_test_user
        sociallogin.connect = MagicMock()

        adapter.pre_social_login(rf.get("/"), sociallogin)

        call_args = sociallogin.connect.call_args
        assert call_args is not None

        called_request, called_user = call_args[0]
        assert called_request.path == "/"
        assert called_user.email == create_test_user.email

    def test_pre_social_login_no_link_if_email_missing(self, rf):
        adapter = ProwlerSocialAccountAdapter()

        sociallogin = MagicMock(spec=SocialLogin)
        sociallogin.account = MagicMock()
        sociallogin.provider = MagicMock()
        sociallogin.user = MagicMock()
        sociallogin.provider.id = "saml"
        sociallogin.account.extra_data = {}
        sociallogin.connect = MagicMock()

        adapter.pre_social_login(rf.get("/"), sociallogin)

        sociallogin.connect.assert_not_called()

    def test_save_user_saml_sets_session_flag(self, rf):
        adapter = ProwlerSocialAccountAdapter()
        request = rf.get("/")
        request.session = {}

        sociallogin = MagicMock(spec=SocialLogin)
        sociallogin.provider = MagicMock()
        sociallogin.provider.id = "saml"
        sociallogin.account = MagicMock()
        sociallogin.account.extra_data = {}

        mock_user = MagicMock()
        mock_user.id = 123

        with patch("api.adapters.super") as mock_super:
            with patch("api.adapters.transaction"):
                with patch("api.adapters.MainRouter"):
                    mock_super.return_value.save_user.return_value = mock_user
                    adapter.save_user(request, sociallogin)
                    assert request.session["saml_user_created"] == "123"

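The deleted suite above pins down the adapter's observable contract: resolve an existing user by email, connect an incoming SAML login to that user, skip linking when no usable email is present, and flag the session when a SAML user is created. The following is a rough reconstruction of an adapter that would satisfy the linking assertions; it is an assumption inferred from the tests, not the project's actual api.adapters implementation:

from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.contrib.auth import get_user_model


class SketchSocialAccountAdapter(DefaultSocialAccountAdapter):
    def get_user_by_email(self, email):
        # First two tests: return the matching user, or None
        return get_user_model().objects.filter(email=email).first()

    def pre_social_login(self, request, sociallogin):
        email = getattr(sociallogin.user, "email", None)
        if not email:
            return  # nothing to match on, never link
        user = self.get_user_by_email(email)
        if user is not None:
            # connect() attaches the social account to the existing user
            sociallogin.connect(request, user)
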
@@ -1,12 +1,12 @@
-from unittest.mock import MagicMock, patch
+from unittest.mock import patch, MagicMock

from api.compliance import (
-    generate_compliance_overview_template,
-    generate_scan_compliance,
    get_prowler_provider_checks,
    get_prowler_provider_compliance,
-    load_prowler_checks,
    load_prowler_compliance,
+    load_prowler_checks,
+    generate_scan_compliance,
+    generate_compliance_overview_template,
)
from api.models import Provider

@@ -69,7 +69,7 @@ class TestCompliance:

        load_prowler_compliance()

-        from api.compliance import PROWLER_CHECKS, PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
+        from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS

        assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
            "template_key": "template_value"

@@ -218,10 +218,6 @@ class TestCompliance:
            Description="Description of requirement 1",
            Attributes=[],
            Checks=["check1", "check2"],
-            Tactics=["tactic1"],
-            SubTechniques=["subtechnique1"],
-            Platforms=["platform1"],
-            TechniqueURL="https://example.com",
        )
        requirement2 = MagicMock(
            Id="requirement2",

@@ -229,10 +225,6 @@ class TestCompliance:
            Description="Description of requirement 2",
            Attributes=[],
            Checks=[],
-            Tactics=[],
-            SubTechniques=[],
-            Platforms=[],
-            TechniqueURL="",
        )
        compliance1 = MagicMock(
            Requirements=[requirement1, requirement2],

@@ -255,10 +247,6 @@ class TestCompliance:
            "requirement1": {
                "name": "Requirement 1",
                "description": "Description of requirement 1",
-                "tactics": ["tactic1"],
-                "subtechniques": ["subtechnique1"],
-                "platforms": ["platform1"],
-                "technique_url": "https://example.com",
                "attributes": [],
                "checks": {"check1": None, "check2": None},
                "checks_status": {

@@ -272,10 +260,6 @@ class TestCompliance:
            "requirement2": {
                "name": "Requirement 2",
                "description": "Description of requirement 2",
-                "tactics": [],
-                "subtechniques": [],
-                "platforms": [],
-                "technique_url": "",
                "attributes": [],
                "checks": {},
                "checks_status": {

@@ -284,7 +268,7 @@ class TestCompliance:
                    "manual": 0,
                    "total": 0,
                },
-                "status": "MANUAL",
+                "status": "PASS",
            },
        },
        "requirements_status": {

@@ -3,17 +3,12 @@ from enum import Enum
from unittest.mock import patch

import pytest
-from django.conf import settings
-from freezegun import freeze_time

from api.db_utils import (
-    _should_create_index_on_partition,
    batch_delete,
-    create_objects_in_batches,
    enum_to_choices,
    generate_random_token,
    one_week_from_now,
-    update_objects_in_batches,
)
from api.models import Provider

@@ -143,173 +138,3 @@ class TestBatchDelete:
        )
        assert Provider.objects.all().count() == 0
        assert summary == {"api.Provider": create_test_providers}


class TestShouldCreateIndexOnPartition:
    @freeze_time("2025-05-15 00:00:00Z")
    @pytest.mark.parametrize(
        "partition_name, all_partitions, expected",
        [
            ("any_name", True, True),
            ("findings_default", True, True),
            ("findings_2022_jan", True, True),
            ("foo_bar", False, True),
            ("findings_2025_MAY", False, True),
            ("findings_2025_may", False, True),
            ("findings_2025_jun", False, True),
            ("findings_2025_apr", False, False),
            ("findings_2025_xyz", False, True),
        ],
    )
    def test_partition_inclusion_logic(self, partition_name, all_partitions, expected):
        assert (
            _should_create_index_on_partition(partition_name, all_partitions)
            is expected
        )

    @freeze_time("2025-05-15 00:00:00Z")
    def test_invalid_date_components(self):
        # even if the regex matches but the int conversion fails, we fall back to True
        # (e.g. year too big, month number parse error)
        bad_name = "findings_99999_jan"
        assert _should_create_index_on_partition(bad_name, False) is True

        bad_name2 = "findings_2025_abc"
        # "abc" is not in month_map, so we fall back to True
        assert _should_create_index_on_partition(bad_name2, False) is True


@pytest.mark.django_db
class TestCreateObjectsInBatches:
    @pytest.fixture
    def tenant(self, tenants_fixture):
        return tenants_fixture[0]

    def make_provider_instances(self, tenant, count):
        """
        Return a list of `count` unsaved Provider instances for the given tenant.
        """
        base_uid = 1000
        return [
            Provider(
                tenant=tenant,
                uid=str(base_uid + i),
                provider=Provider.ProviderChoices.AWS,
            )
            for i in range(count)
        ]

    def test_exact_multiple_of_batch(self, tenant):
        total = 6
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total

    def test_non_multiple_of_batch(self, tenant):
        total = 7
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total

    def test_batch_size_default(self, tenant):
        default_size = settings.DJANGO_DELETION_BATCH_SIZE
        total = default_size + 2
        objs = self.make_provider_instances(tenant, total)

        create_objects_in_batches(str(tenant.id), Provider, objs)

        qs = Provider.objects.filter(tenant=tenant)
        assert qs.count() == total


@pytest.mark.django_db
class TestUpdateObjectsInBatches:
    @pytest.fixture
    def tenant(self, tenants_fixture):
        return tenants_fixture[0]

    def make_provider_instances(self, tenant, count):
        """
        Return a list of `count` unsaved Provider instances for the given tenant.
        """
        base_uid = 2000
        return [
            Provider(
                tenant=tenant,
                uid=str(base_uid + i),
                provider=Provider.ProviderChoices.AWS,
            )
            for i in range(count)
        ]

    def test_exact_multiple_of_batch(self, tenant):
        total = 6
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)
        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        # Fetch them back, mutate the `uid` field, then update in batches
        providers = list(Provider.objects.filter(tenant=tenant))
        for p in providers:
            p.uid = f"{p.uid}_upd"

        update_objects_in_batches(
            tenant_id=str(tenant.id),
            model=Provider,
            objects=providers,
            fields=["uid"],
            batch_size=batch_size,
        )

        qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
        assert qs.count() == total

    def test_non_multiple_of_batch(self, tenant):
        total = 7
        batch_size = 3
        objs = self.make_provider_instances(tenant, total)
        create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)

        providers = list(Provider.objects.filter(tenant=tenant))
        for p in providers:
            p.uid = f"{p.uid}_upd"

        update_objects_in_batches(
            tenant_id=str(tenant.id),
            model=Provider,
            objects=providers,
            fields=["uid"],
            batch_size=batch_size,
        )

        qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
        assert qs.count() == total

    def test_batch_size_default(self, tenant):
        default_size = settings.DJANGO_DELETION_BATCH_SIZE
        total = default_size + 2
        objs = self.make_provider_instances(tenant, total)
        create_objects_in_batches(str(tenant.id), Provider, objs)

        providers = list(Provider.objects.filter(tenant=tenant))
        for p in providers:
            p.uid = f"{p.uid}_upd"

        # Update without specifying batch_size (uses default)
        update_objects_in_batches(
            tenant_id=str(tenant.id),
            model=Provider,
            objects=providers,
            fields=["uid"],
        )

        qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
        assert qs.count() == total

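Read together, the parametrized cases removed above fully determine _should_create_index_on_partition: with all_partitions it always returns True, names that do not parse as findings_<year>_<mon> fall back to True, and parseable names get the index only for the current month or later. A sketch reconstructed from those expectations, not the real api.db_utils source:

import re
from datetime import date

MONTH_MAP = {m: i + 1 for i, m in enumerate(
    ["jan", "feb", "mar", "apr", "may", "jun",
     "jul", "aug", "sep", "oct", "nov", "dec"]
)}


def should_create_index_on_partition(partition_name: str, all_partitions: bool) -> bool:
    if all_partitions:
        return True
    match = re.match(r"^findings_(\d{4})_([a-zA-Z]{3})$", partition_name)
    if not match:
        return True  # unparsable names fall back to creating the index
    try:
        year = int(match.group(1))
        month = MONTH_MAP[match.group(2).lower()]
        first_of_partition = date(year, month, 1)
    except (KeyError, ValueError):
        return True  # unknown month token or out-of-range year: fall back to True
    today = date.today()
    # Index only current and future partitions; with 2025-05-15 frozen,
    # findings_2025_apr is skipped while findings_2025_may/jun are created.
    return first_of_partition >= date(today.year, today.month, 1)
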
@@ -1,379 +0,0 @@
import json
from uuid import uuid4

import pytest
from django_celery_results.models import TaskResult
from rest_framework import status
from rest_framework.response import Response

from api.exceptions import (
    TaskFailedException,
    TaskInProgressException,
    TaskNotFoundException,
)
from api.models import Task, User
from api.rls import Tenant
from api.v1.mixins import PaginateByPkMixin, TaskManagementMixin


@pytest.mark.django_db
class TestPaginateByPkMixin:
    @pytest.fixture
    def tenant(self):
        return Tenant.objects.create(name="Test Tenant")

    @pytest.fixture
    def users(self, tenant):
        # Create 5 users with proper email field
        users = []
        for i in range(5):
            user = User.objects.create(email=f"user{i}@example.com", name=f"User {i}")
            users.append(user)
        return users

    class DummyView(PaginateByPkMixin):
        def __init__(self, page):
            self._page = page

        def paginate_queryset(self, qs):
            return self._page

        def get_serializer(self, queryset, many):
            class S:
                def __init__(self, data):
                    # serialize to list of ids
                    self.data = [obj.id for obj in data] if many else queryset.id

            return S(queryset)

        def get_paginated_response(self, data):
            return Response({"results": data}, status=status.HTTP_200_OK)

    def test_no_pagination(self, users):
        base_qs = User.objects.all().order_by("id")
        view = self.DummyView(page=None)
        resp = view.paginate_by_pk(
            request=None, base_queryset=base_qs, manager=User.objects
        )
        # since no pagination, should return all ids in order
        expected = [u.id for u in base_qs]
        assert isinstance(resp, Response)
        assert resp.data == expected

    def test_with_pagination(self, users):
        base_qs = User.objects.all().order_by("id")
        # simulate a page containing two of the user ids
        page = [base_qs[1].id, base_qs[3].id]
        view = self.DummyView(page=page)
        resp = view.paginate_by_pk(
            request=None, base_queryset=base_qs, manager=User.objects
        )
        # should fetch only those two users, in the same order as page
        assert resp.status_code == status.HTTP_200_OK
        assert resp.data == {"results": page}


@pytest.mark.django_db
class TestTaskManagementMixin:
    class DummyView(TaskManagementMixin):
        pass

    @pytest.fixture
    def tenant(self):
        return Tenant.objects.create(name="Test Tenant")

    @pytest.fixture(autouse=True)
    def cleanup(self):
        Task.objects.all().delete()
        TaskResult.objects.all().delete()

    def test_no_task_and_no_taskresult_raises_not_found(self):
        view = self.DummyView()
        with pytest.raises(TaskNotFoundException):
            view.check_task_status("task_xyz", {"foo": "bar"})

    def test_no_task_and_no_taskresult_returns_none_when_not_raising(self):
        view = self.DummyView()
        result = view.check_task_status(
            "task_xyz", {"foo": "bar"}, raise_on_not_found=False
        )
        assert result is None

    def test_taskresult_pending_raises_in_progress(self):
        task_kwargs = {"foo": "bar"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="task_xyz",
            task_kwargs=json.dumps(task_kwargs),
            status="PENDING",
        )
        view = self.DummyView()
        with pytest.raises(TaskInProgressException) as excinfo:
            view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
        assert hasattr(excinfo.value, "task_result")
        assert excinfo.value.task_result == tr

    def test_taskresult_started_raises_in_progress(self):
        task_kwargs = {"foo": "bar"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="task_xyz",
            task_kwargs=json.dumps(task_kwargs),
            status="STARTED",
        )
        view = self.DummyView()
        with pytest.raises(TaskInProgressException) as excinfo:
            view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
        assert hasattr(excinfo.value, "task_result")
        assert excinfo.value.task_result == tr

    def test_taskresult_progress_raises_in_progress(self):
        task_kwargs = {"foo": "bar"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="task_xyz",
            task_kwargs=json.dumps(task_kwargs),
            status="PROGRESS",
        )
        view = self.DummyView()
        with pytest.raises(TaskInProgressException) as excinfo:
            view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
        assert hasattr(excinfo.value, "task_result")
        assert excinfo.value.task_result == tr

    def test_taskresult_failure_raises_failed(self):
        task_kwargs = {"a": 1}
        TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="task_fail",
            task_kwargs=json.dumps(task_kwargs),
            status="FAILURE",
        )
        view = self.DummyView()
        with pytest.raises(TaskFailedException):
            view.check_task_status("task_fail", task_kwargs, raise_on_not_found=False)

    def test_taskresult_failure_returns_none_when_not_raising(self):
        task_kwargs = {"a": 1}
        TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="task_fail",
            task_kwargs=json.dumps(task_kwargs),
            status="FAILURE",
        )
        view = self.DummyView()
        result = view.check_task_status(
            "task_fail", task_kwargs, raise_on_failed=False, raise_on_not_found=False
        )
        assert result is None

    def test_taskresult_success_returns_none(self):
        task_kwargs = {"x": 2}
        TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="task_ok",
            task_kwargs=json.dumps(task_kwargs),
            status="SUCCESS",
        )
        view = self.DummyView()
        # should not raise, and returns None
        assert (
            view.check_task_status("task_ok", task_kwargs, raise_on_not_found=False)
            is None
        )

    def test_taskresult_revoked_returns_none(self):
        task_kwargs = {"x": 2}
        TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="task_revoked",
            task_kwargs=json.dumps(task_kwargs),
            status="REVOKED",
        )
        view = self.DummyView()
        # should not raise, and returns None
        assert (
            view.check_task_status(
                "task_revoked", task_kwargs, raise_on_not_found=False
            )
            is None
        )

    def test_task_with_failed_status_raises_failed(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="FAILURE",
        )
        task = Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        with pytest.raises(TaskFailedException) as excinfo:
            view.check_task_status("scan_task", task_kwargs)
        # Check that the exception contains the expected task
        assert hasattr(excinfo.value, "task")
        assert excinfo.value.task == task

    def test_task_with_cancelled_status_raises_failed(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="REVOKED",
        )
        task = Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        with pytest.raises(TaskFailedException) as excinfo:
            view.check_task_status("scan_task", task_kwargs)
        # Check that the exception contains the expected task
        assert hasattr(excinfo.value, "task")
        assert excinfo.value.task == task

    def test_task_with_failed_status_returns_task_when_not_raising(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="FAILURE",
        )
        task = Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        result = view.check_task_status("scan_task", task_kwargs, raise_on_failed=False)
        assert result == task

    def test_task_with_completed_status_returns_none(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="SUCCESS",
        )
        Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        result = view.check_task_status("scan_task", task_kwargs)
        assert result is None

    def test_task_with_executing_status_returns_task(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="STARTED",
        )
        task = Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        result = view.check_task_status("scan_task", task_kwargs)
        assert result is not None
        assert result.pk == task.pk

    def test_task_with_pending_status_returns_task(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="PENDING",
        )
        task = Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        result = view.check_task_status("scan_task", task_kwargs)
        assert result is not None
        assert result.pk == task.pk

    def test_get_task_response_if_running_returns_none_for_completed_task(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="SUCCESS",
        )
        Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        result = view.get_task_response_if_running("scan_task", task_kwargs)
        assert result is None

    def test_get_task_response_if_running_returns_none_for_no_task(self):
        view = self.DummyView()
        result = view.get_task_response_if_running(
            "nonexistent", {"foo": "bar"}, raise_on_not_found=False
        )
        assert result is None

    def test_get_task_response_if_running_returns_202_for_executing_task(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="STARTED",
        )
        task = Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        result = view.get_task_response_if_running("scan_task", task_kwargs)

        assert isinstance(result, Response)
        assert result.status_code == status.HTTP_202_ACCEPTED
        assert "Content-Location" in result.headers
        # The response should contain the serialized task data
        assert result.data is not None
        assert "id" in result.data
        assert str(result.data["id"]) == str(task.id)

    def test_get_task_response_if_running_returns_none_for_available_task(self, tenant):
        task_kwargs = {"provider_id": "test"}
        tr = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs),
            status="PENDING",
        )
        Task.objects.create(tenant=tenant, task_runner_task=tr)
        view = self.DummyView()
        result = view.get_task_response_if_running("scan_task", task_kwargs)
        # PENDING maps to AVAILABLE, which is not EXECUTING, so should return None
        assert result is None

    def test_kwargs_filtering_works_correctly(self, tenant):
        # Create tasks with different kwargs
        task_kwargs_1 = {"provider_id": "test1", "scan_type": "full"}
        task_kwargs_2 = {"provider_id": "test2", "scan_type": "quick"}

        tr1 = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs_1),
            status="STARTED",
        )
        tr2 = TaskResult.objects.create(
            task_id=str(uuid4()),
            task_name="scan_task",
            task_kwargs=json.dumps(task_kwargs_2),
            status="STARTED",
        )

        task1 = Task.objects.create(tenant=tenant, task_runner_task=tr1)
        task2 = Task.objects.create(tenant=tenant, task_runner_task=tr2)

        view = self.DummyView()

        # Should find task1 when searching for its kwargs
        result1 = view.check_task_status("scan_task", {"provider_id": "test1"})
        assert result1 is not None
        assert result1.pk == task1.pk

        # Should find task2 when searching for its kwargs
        result2 = view.check_task_status("scan_task", {"provider_id": "test2"})
        assert result2 is not None
        assert result2.pk == task2.pk

        # Should not find anything when searching for non-existent kwargs
        result3 = view.check_task_status(
            "scan_task", {"provider_id": "test3"}, raise_on_not_found=False
        )
        assert result3 is None

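Taken together, the deleted TestTaskManagementMixin cases specify a small decision table for check_task_status. A sketch of that logic as a pure function; the Task/TaskResult lookup and the exact exception constructor signatures are assumptions, only the outcomes come from the tests above:

from api.exceptions import (
    TaskFailedException,
    TaskInProgressException,
    TaskNotFoundException,
)

IN_PROGRESS = {"PENDING", "STARTED", "PROGRESS"}
FAILED = {"FAILURE", "REVOKED"}


def check_task_status_sketch(task, task_result, raise_on_not_found=True, raise_on_failed=True):
    # task: the API-level Task row (or None); task_result: the celery TaskResult (or None).
    if task is None and task_result is None:
        if raise_on_not_found:
            raise TaskNotFoundException()
        return None
    if task is not None:
        status = task.task_runner_task.status
        if status in FAILED:
            if raise_on_failed:
                raise TaskFailedException()  # the real exception carries the Task
            return task
        # STARTED/PENDING tasks are handed back to the caller; SUCCESS yields None
        return task if status in IN_PROGRESS else None
    # Only a raw TaskResult exists for this name/kwargs pair
    if task_result.status in IN_PROGRESS:
        raise TaskInProgressException()  # the real exception carries the TaskResult
    if task_result.status == "FAILURE" and raise_on_failed:
        raise TaskFailedException()
    return None  # SUCCESS / REVOKED without a Task row
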
@@ -1,9 +1,6 @@
import pytest
from allauth.socialaccount.models import SocialApp
from django.core.exceptions import ValidationError

from api.db_router import MainRouter
-from api.models import Resource, ResourceTag, SAMLConfiguration, SAMLDomainIndex
+from api.models import Resource, ResourceTag


@pytest.mark.django_db

@@ -123,204 +120,3 @@ class TestResourceModel:
        #     compliance={},
        # )
        # assert Finding.objects.filter(uid=long_uid).exists()


@pytest.mark.django_db
class TestSAMLConfigurationModel:
    VALID_METADATA = """<?xml version='1.0' encoding='UTF-8'?>
<md:EntityDescriptor entityID='TEST' xmlns:md='urn:oasis:names:tc:SAML:2.0:metadata'>
  <md:IDPSSODescriptor WantAuthnRequestsSigned='false' protocolSupportEnumeration='urn:oasis:names:tc:SAML:2.0:protocol'>
    <md:KeyDescriptor use='signing'>
      <ds:KeyInfo xmlns:ds='http://www.w3.org/2000/09/xmldsig#'>
        <ds:X509Data>
          <ds:X509Certificate>FAKECERTDATA</ds:X509Certificate>
        </ds:X509Data>
      </ds:KeyInfo>
    </md:KeyDescriptor>
    <md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST' Location='https://idp.test/sso'/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>
"""

    def test_creates_valid_configuration(self, tenants_fixture):
        tenant = tenants_fixture[0]
        config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="ssoexample.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant,
        )

        assert config.email_domain == "ssoexample.com"
        assert SocialApp.objects.filter(client_id="ssoexample.com").exists()

    def test_email_domain_with_at_symbol_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        config = SAMLConfiguration(
            email_domain="invalid@domain.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant,
        )

        with pytest.raises(ValidationError) as exc_info:
            config.clean()

        errors = exc_info.value.message_dict
        assert "email_domain" in errors
        assert "Domain must not contain @" in errors["email_domain"][0]

    def test_duplicate_email_domain_fails(self, tenants_fixture):
        tenant1, tenant2, *_ = tenants_fixture

        SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="duplicate.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant1,
        )

        config = SAMLConfiguration(
            email_domain="duplicate.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant2,
        )

        with pytest.raises(ValidationError) as exc_info:
            config.clean()

        errors = exc_info.value.message_dict
        assert "tenant" in errors
        assert "There is a problem with your email domain." in errors["tenant"][0]

    def test_duplicate_tenant_config_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]

        SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="unique1.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant,
        )

        config = SAMLConfiguration(
            email_domain="unique2.com",
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant,
        )

        with pytest.raises(ValidationError) as exc_info:
            config.clean()

        errors = exc_info.value.message_dict
        assert "tenant" in errors
        assert (
            "A SAML configuration already exists for this tenant."
            in errors["tenant"][0]
        )

    def test_invalid_metadata_xml_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        config = SAMLConfiguration(
            email_domain="brokenxml.com",
            metadata_xml="<bad<xml>",
            tenant=tenant,
        )

        with pytest.raises(ValidationError) as exc_info:
            config._parse_metadata()

        errors = exc_info.value.message_dict
        assert "metadata_xml" in errors
        assert "Invalid XML" in errors["metadata_xml"][0]
        assert "not well-formed" in errors["metadata_xml"][0]

    def test_metadata_missing_sso_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
  <md:IDPSSODescriptor></md:IDPSSODescriptor>
</md:EntityDescriptor>"""
        config = SAMLConfiguration(
            email_domain="nosso.com",
            metadata_xml=xml,
            tenant=tenant,
        )

        with pytest.raises(ValidationError) as exc_info:
            config._parse_metadata()

        errors = exc_info.value.message_dict
        assert "metadata_xml" in errors
        assert "Missing SingleSignOnService" in errors["metadata_xml"][0]

    def test_metadata_missing_certificate_fails(self, tenants_fixture):
        tenant = tenants_fixture[0]
        xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
  <md:IDPSSODescriptor>
    <md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" Location="https://example.com/sso"/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>"""
        config = SAMLConfiguration(
            email_domain="nocert.com",
            metadata_xml=xml,
            tenant=tenant,
        )

        with pytest.raises(ValidationError) as exc_info:
            config._parse_metadata()

        errors = exc_info.value.message_dict
        assert "metadata_xml" in errors
        assert "X509Certificate" in errors["metadata_xml"][0]

    def test_deletes_saml_configuration_and_related_objects(self, tenants_fixture):
        tenant = tenants_fixture[0]
        email_domain = "deleteme.com"

        # Create the configuration
        config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain=email_domain,
            metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
            tenant=tenant,
        )

        # Verify that the SocialApp and SAMLDomainIndex exist
        assert SocialApp.objects.filter(client_id=email_domain).exists()
        assert (
            SAMLDomainIndex.objects.using(MainRouter.admin_db)
            .filter(email_domain=email_domain)
            .exists()
        )

        # Delete the configuration
        config.delete()

        # Verify that the configuration and its related objects are deleted
        assert (
            not SAMLConfiguration.objects.using(MainRouter.admin_db)
            .filter(pk=config.pk)
            .exists()
        )
        assert not SocialApp.objects.filter(client_id=email_domain).exists()
        assert (
            not SAMLDomainIndex.objects.using(MainRouter.admin_db)
            .filter(email_domain=email_domain)
            .exists()
        )

    def test_duplicate_entity_id_fails_on_creation(self, tenants_fixture):
        tenant1, tenant2, *_ = tenants_fixture
        SAMLConfiguration.objects.using(MainRouter.admin_db).create(
            email_domain="first.com",
            metadata_xml=self.VALID_METADATA,
            tenant=tenant1,
        )

        config = SAMLConfiguration(
            email_domain="second.com",
            metadata_xml=self.VALID_METADATA,
            tenant=tenant2,
        )

        with pytest.raises(ValidationError) as exc_info:
            config.save()

        errors = exc_info.value.message_dict
        assert "metadata_xml" in errors
        assert "There is a problem with your metadata." in errors["metadata_xml"][0]

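The removed SAML suite encodes the metadata checks: the XML must parse, and it must contain a SingleSignOnService endpoint and an X509Certificate, with every failure reported under the metadata_xml key. A sketch of a validator with that shape, reconstructed from the assertions rather than copied from SAMLConfiguration._parse_metadata:

from xml.etree import ElementTree
from django.core.exceptions import ValidationError

NS = {
    "md": "urn:oasis:names:tc:SAML:2.0:metadata",
    "ds": "http://www.w3.org/2000/09/xmldsig#",
}


def parse_metadata_sketch(metadata_xml: str) -> ElementTree.Element:
    # Malformed input such as "<bad<xml>" raises ParseError with a
    # "not well-formed" message, matching the test's assertion
    try:
        root = ElementTree.fromstring(metadata_xml)
    except ElementTree.ParseError as e:
        raise ValidationError({"metadata_xml": f"Invalid XML: {e}"})
    if root.find(".//md:SingleSignOnService", NS) is None:
        raise ValidationError({"metadata_xml": "Missing SingleSignOnService element."})
    if root.find(".//ds:X509Certificate", NS) is None:
        raise ValidationError({"metadata_xml": "Missing X509Certificate element."})
    return root
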
@@ -1,7 +1,6 @@
from unittest.mock import ANY, Mock, patch

import pytest
-from conftest import TODAY
from django.urls import reverse
from rest_framework import status

@@ -61,7 +60,7 @@ class TestUserViewSet:
    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
-            "password": "Newpassword123@",
+            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(

@@ -75,7 +74,7 @@ class TestUserViewSet:
    ):
        valid_user_payload = {
            "name": "test",
-            "password": "Newpassword123@",
+            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(

@@ -322,7 +321,7 @@ class TestProviderViewSet:
@pytest.mark.django_db
class TestLimitedVisibility:
    TEST_EMAIL = "rbac@rbac.com"
-    TEST_PASSWORD = "Thisisapassword123@"
+    TEST_PASSWORD = "thisisapassword123"

    @pytest.fixture
    def limited_admin_user(

@@ -410,87 +409,3 @@ class TestLimitedVisibility:
        assert (
            response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
        )

    def test_overviews_providers(
        self,
        authenticated_client_rbac_limited,
        scan_summaries_fixture,
        providers_fixture,
    ):
        # By default, the associated provider is the one which has the overview data
        response = authenticated_client_rbac_limited.get(reverse("overview-providers"))

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) > 0

        # Changing the provider visibility, no data should be returned
        # Only the associated provider to that group is changed
        new_provider = providers_fixture[1]
        ProviderGroupMembership.objects.all().update(provider=new_provider)

        response = authenticated_client_rbac_limited.get(reverse("overview-providers"))

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    @pytest.mark.parametrize(
        "endpoint_name",
        [
            "findings",
            "findings_severity",
        ],
    )
    def test_overviews_findings(
        self,
        endpoint_name,
        authenticated_client_rbac_limited,
        scan_summaries_fixture,
        providers_fixture,
    ):
        # By default, the associated provider is the one which has the overview data
        response = authenticated_client_rbac_limited.get(
            reverse(f"overview-{endpoint_name}")
        )

        assert response.status_code == status.HTTP_200_OK
        values = response.json()["data"]["attributes"].values()
        assert any(value > 0 for value in values)

        # Changing the provider visibility, no data should be returned
        # Only the associated provider to that group is changed
        new_provider = providers_fixture[1]
        ProviderGroupMembership.objects.all().update(provider=new_provider)

        response = authenticated_client_rbac_limited.get(
            reverse(f"overview-{endpoint_name}")
        )

        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]["attributes"].values()
        assert all(value == 0 for value in data)

    def test_overviews_services(
        self,
        authenticated_client_rbac_limited,
        scan_summaries_fixture,
        providers_fixture,
    ):
        # By default, the associated provider is the one which has the overview data
        response = authenticated_client_rbac_limited.get(
            reverse("overview-services"), {"filter[inserted_at]": TODAY}
        )

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) > 0

        # Changing the provider visibility, no data should be returned
        # Only the associated provider to that group is changed
        new_provider = providers_fixture[1]
        ProviderGroupMembership.objects.all().update(provider=new_provider)

        response = authenticated_client_rbac_limited.get(
            reverse("overview-services"), {"filter[inserted_at]": TODAY}
        )

        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

@@ -1,80 +0,0 @@
import logging
from unittest.mock import MagicMock

from config.settings.sentry import before_send


def test_before_send_ignores_log_with_ignored_exception():
    """Test that before_send ignores logs containing ignored exceptions."""
    log_record = MagicMock()
    log_record.msg = "Provider kubernetes is not connected"
    log_record.levelno = logging.ERROR  # 40

    hint = {"log_record": log_record}

    event = MagicMock()

    result = before_send(event, hint)

    # Assert that the event was dropped (None returned)
    assert result is None


def test_before_send_ignores_exception_with_ignored_exception():
    """Test that before_send ignores exceptions containing ignored exceptions."""
    exc_info = (Exception, Exception("Provider kubernetes is not connected"), None)

    hint = {"exc_info": exc_info}

    event = MagicMock()

    result = before_send(event, hint)

    # Assert that the event was dropped (None returned)
    assert result is None


def test_before_send_passes_through_non_ignored_log():
    """Test that before_send passes through logs that don't contain ignored exceptions."""
    log_record = MagicMock()
    log_record.msg = "Some other error message"
    log_record.levelno = logging.ERROR  # 40

    hint = {"log_record": log_record}

    event = MagicMock()

    result = before_send(event, hint)

    # Assert that the event was passed through
    assert result == event


def test_before_send_passes_through_non_ignored_exception():
    """Test that before_send passes through exceptions that don't contain ignored exceptions."""
    exc_info = (Exception, Exception("Some other error message"), None)

    hint = {"exc_info": exc_info}

    event = MagicMock()

    result = before_send(event, hint)

    # Assert that the event was passed through
    assert result == event


def test_before_send_handles_warning_level():
    """Test that before_send handles warning level logs."""
    log_record = MagicMock()
    log_record.msg = "Provider kubernetes is not connected"
    log_record.levelno = logging.WARNING  # 30

    hint = {"log_record": log_record}

    event = MagicMock()

    result = before_send(event, hint)

    # Assert that the event was dropped (None returned)
    assert result is None

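These removed tests describe before_send as a Sentry event filter: an event is dropped (None returned) when the triggering log record at WARNING level or above, or the hinted exception, matches an ignored message, and everything else passes through. A sketch with that behavior; the actual ignored-message list lives in config.settings.sentry and is assumed here:

import logging

IGNORED_MESSAGES = ("is not connected",)  # assumed fragment list, not the real one


def before_send_sketch(event, hint):
    """Drop events whose log record or exception matches an ignored message."""
    log_record = hint.get("log_record")
    if log_record is not None and log_record.levelno >= logging.WARNING:
        if any(fragment in str(log_record.msg) for fragment in IGNORED_MESSAGES):
            return None  # returning None tells the Sentry SDK to discard the event
    exc_info = hint.get("exc_info")
    if exc_info is not None:
        if any(fragment in str(exc_info[1]) for fragment in IGNORED_MESSAGES):
            return None
    return event
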
@@ -1,100 +0,0 @@
import pytest
from rest_framework.exceptions import ValidationError

from api.v1.serializer_utils.integrations import S3ConfigSerializer


class TestS3ConfigSerializer:
    """Test cases for S3ConfigSerializer validation."""

    def test_validate_output_directory_valid_paths(self):
        """Test that valid output directory paths are accepted."""
        serializer = S3ConfigSerializer()

        # Test normal paths
        assert serializer.validate_output_directory("test") == "test"
        assert serializer.validate_output_directory("test/folder") == "test/folder"
        assert serializer.validate_output_directory("my-folder_123") == "my-folder_123"

        # Test paths with leading slashes (should be normalized)
        assert serializer.validate_output_directory("/test") == "test"
        assert serializer.validate_output_directory("/test/folder") == "test/folder"

        # Test paths with excessive slashes (should be normalized)
        assert serializer.validate_output_directory("///test") == "test"
        assert serializer.validate_output_directory("///////test") == "test"
        assert serializer.validate_output_directory("test//folder") == "test/folder"
        assert serializer.validate_output_directory("test///folder") == "test/folder"

    def test_validate_output_directory_empty_values(self):
        """Test that empty values raise validation errors."""
        serializer = S3ConfigSerializer()

        with pytest.raises(
            ValidationError, match="Output directory cannot be empty or just"
        ):
            serializer.validate_output_directory(".")

        with pytest.raises(
            ValidationError, match="Output directory cannot be empty or just"
        ):
            serializer.validate_output_directory("/")

    def test_validate_output_directory_invalid_characters(self):
        """Test that invalid characters are rejected."""
        serializer = S3ConfigSerializer()

        invalid_chars = ["<", ">", ":", '"', "|", "?", "*"]

        for char in invalid_chars:
            with pytest.raises(
                ValidationError, match="Output directory contains invalid characters"
            ):
                serializer.validate_output_directory(f"test{char}folder")

    def test_validate_output_directory_too_long(self):
        """Test that paths that are too long are rejected."""
        serializer = S3ConfigSerializer()

        # Create a path longer than 900 characters
        long_path = "a" * 901

        with pytest.raises(ValidationError, match="Output directory path is too long"):
            serializer.validate_output_directory(long_path)

    def test_validate_output_directory_edge_cases(self):
        """Test edge cases for output directory validation."""
        serializer = S3ConfigSerializer()

        # Test path at the limit (900 characters)
        path_at_limit = "a" * 900
        assert serializer.validate_output_directory(path_at_limit) == path_at_limit

        # Test complex normalization
        assert serializer.validate_output_directory("//test/../folder//") == "folder"
        assert serializer.validate_output_directory("/test/./folder/") == "test/folder"

    def test_s3_config_serializer_full_validation(self):
        """Test the full S3ConfigSerializer with valid data."""
        data = {
            "bucket_name": "my-test-bucket",
            "output_directory": "///////test",  # This should be normalized
        }

        serializer = S3ConfigSerializer(data=data)
        assert serializer.is_valid()

        validated_data = serializer.validated_data
        assert validated_data["bucket_name"] == "my-test-bucket"
        assert validated_data["output_directory"] == "test"  # Normalized

    def test_s3_config_serializer_invalid_data(self):
        """Test the full S3ConfigSerializer with invalid data."""
        data = {
            "bucket_name": "my-test-bucket",
            "output_directory": "test<invalid",  # Contains invalid character
        }

        serializer = S3ConfigSerializer(data=data)
        assert not serializer.is_valid()
        assert "output_directory" in serializer.errors

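The validation rules the deleted tests encode are: normalize the path (collapsing repeated slashes and dot segments, stripping leading and trailing separators), reject empty results, reject the <>:"|?* characters, and cap the length at 900. A sketch that passes the cases above; the real S3ConfigSerializer may differ in details:

import posixpath
import re
from rest_framework.exceptions import ValidationError


def validate_output_directory(value: str) -> str:
    # posixpath.normpath collapses "//", "/./" and "/../" segments;
    # stripping "/" afterwards removes leading and trailing separators
    normalized = posixpath.normpath(value).strip("/")
    if not normalized or normalized == ".":
        raise ValidationError("Output directory cannot be empty or just separators.")
    if re.search(r'[<>:"|?*]', normalized):
        raise ValidationError("Output directory contains invalid characters.")
    if len(normalized) > 900:
        raise ValidationError("Output directory path is too long.")
    return normalized
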
@@ -131,21 +131,6 @@ class TestInitializeProwlerProvider:
        initialize_prowler_provider(provider)
        mock_return_prowler_provider.return_value.assert_called_once_with(key="value")

    @patch("api.utils.return_prowler_provider")
    def test_initialize_prowler_provider_with_mutelist(
        self, mock_return_prowler_provider
    ):
        provider = MagicMock()
        provider.secret.secret = {"key": "value"}
        mutelist_processor = MagicMock()
        mutelist_processor.configuration = {"Mutelist": {"key": "value"}}
        mock_return_prowler_provider.return_value = MagicMock()

        initialize_prowler_provider(provider, mutelist_processor)
        mock_return_prowler_provider.return_value.assert_called_once_with(
            key="value", mutelist_content={"key": "value"}
        )


class TestProwlerProviderConnectionTest:
    @patch("api.utils.return_prowler_provider")

@@ -215,25 +200,6 @@ class TestGetProwlerProviderKwargs:
        expected_result = {**secret_dict, **expected_extra_kwargs}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_with_mutelist(self):
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        mutelist_processor = MagicMock()
        mutelist_processor.configuration = {"Mutelist": {"key": "value"}}

        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.AWS.value
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider, mutelist_processor)

        expected_result = {**secret_dict, "mutelist_content": {"key": "value"}}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_unsupported_provider(self):
        # Setup
        provider_uid = "provider_uid"

@@ -288,7 +254,7 @@ class TestValidateInvitation:

        assert result == invitation
        mock_db.get.assert_called_once_with(
-            token="VALID_TOKEN", email__iexact="user@example.com"
+            token="VALID_TOKEN", email="user@example.com"
        )

    def test_invitation_not_found_raises_validation_error(self):

@@ -303,7 +269,7 @@
            "invitation_token": "Invalid invitation code."
        }
        mock_db.get.assert_called_once_with(
-            token="INVALID_TOKEN", email__iexact="user@example.com"
+            token="INVALID_TOKEN", email="user@example.com"
        )

    def test_invitation_not_found_raises_not_found(self):

@@ -318,7 +284,7 @@

        assert exc_info.value.detail == "Invitation is not valid."
        mock_db.get.assert_called_once_with(
-            token="INVALID_TOKEN", email__iexact="user@example.com"
+            token="INVALID_TOKEN", email="user@example.com"
        )

    def test_invitation_expired(self, invitation):

@@ -366,27 +332,5 @@
            "invitation_token": "Invalid invitation code."
        }
        mock_db.get.assert_called_once_with(
            token="VALID_TOKEN", email__iexact="different@example.com"
        )

    def test_valid_invitation_uppercase_email(self):
        """Test that validate_invitation works with case-insensitive email lookup."""
        uppercase_email = "USER@example.com"

        invitation = MagicMock(spec=Invitation)
        invitation.token = "VALID_TOKEN"
        invitation.email = uppercase_email
        invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
        invitation.state = Invitation.State.PENDING
        invitation.tenant = MagicMock()

        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            result = validate_invitation("VALID_TOKEN", "user@example.com")

            assert result == invitation
            mock_db.get.assert_called_once_with(
-                token="VALID_TOKEN", email__iexact="user@example.com"
+                token="VALID_TOKEN", email="different@example.com"
            )

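The substance of the pairs above is Django's case-insensitive __iexact lookup versus an exact match. For reference, a minimal illustration (token is a placeholder):

# Case-insensitive: matches whether the row stores USER@example.com or user@example.com
Invitation.objects.get(token=token, email__iexact="user@example.com")

# Exact: a row storing USER@example.com would NOT be found
Invitation.objects.get(token=token, email="user@example.com")
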
File diff suppressed because it is too large
@@ -7,14 +7,12 @@ from rest_framework.exceptions import NotFound, ValidationError
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.exceptions import InvitationTokenExpiredException
|
||||
from api.models import Integration, Invitation, Processor, Provider, Resource
|
||||
from api.models import Invitation, Provider, Resource
|
||||
from api.v1.serializers import FindingMetadataSerializer
|
||||
from prowler.providers.aws.aws_provider import AwsProvider
|
||||
from prowler.providers.aws.lib.s3.s3 import S3
|
||||
from prowler.providers.azure.azure_provider import AzureProvider
|
||||
from prowler.providers.common.models import Connection
|
||||
from prowler.providers.gcp.gcp_provider import GcpProvider
|
||||
from prowler.providers.github.github_provider import GithubProvider
|
||||
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
|
||||
from prowler.providers.m365.m365_provider import M365Provider
|
||||
|
||||
@@ -57,21 +55,14 @@ def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
|
||||
|
||||
def return_prowler_provider(
|
||||
provider: Provider,
|
||||
) -> [
|
||||
AwsProvider
|
||||
    | AzureProvider
    | GcpProvider
    | GithubProvider
    | KubernetesProvider
    | M365Provider
]:
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider]:
    """Return the Prowler provider class based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: The corresponding provider class.
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: The corresponding provider class.

    Raises:
        ValueError: If the provider type specified in `provider.provider` is not supported.
@@ -87,21 +78,16 @@ def return_prowler_provider(
            prowler_provider = KubernetesProvider
        case Provider.ProviderChoices.M365.value:
            prowler_provider = M365Provider
        case Provider.ProviderChoices.GITHUB.value:
            prowler_provider = GithubProvider
        case _:
            raise ValueError(f"Provider type {provider.provider} not supported")
    return prowler_provider


def get_prowler_provider_kwargs(
    provider: Provider, mutelist_processor: Processor | None = None
) -> dict:
def get_prowler_provider_kwargs(provider: Provider) -> dict:
    """Get the Prowler provider kwargs based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secret.
        mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.

    Returns:
        dict: The provider kwargs for the corresponding provider class.
@@ -119,39 +105,24 @@ def get_prowler_provider_kwargs(
        }
    elif provider.provider == Provider.ProviderChoices.KUBERNETES.value:
        prowler_provider_kwargs = {**prowler_provider_kwargs, "context": provider.uid}

    if mutelist_processor:
        mutelist_content = mutelist_processor.configuration.get("Mutelist", {})
        if mutelist_content:
            prowler_provider_kwargs["mutelist_content"] = mutelist_content

    return prowler_provider_kwargs


def initialize_prowler_provider(
    provider: Provider,
    mutelist_processor: Processor | None = None,
) -> (
    AwsProvider
    | AzureProvider
    | GcpProvider
    | GithubProvider
    | KubernetesProvider
    | M365Provider
):
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider:
    """Initialize a Prowler provider instance based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.
        mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
        (`AwsProvider`, `AzureProvider`, `GcpProvider`, `GithubProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
        (`AwsProvider`, `AzureProvider`, `GcpProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
        provider's secrets.
    """
    prowler_provider = return_prowler_provider(provider)
    prowler_provider_kwargs = get_prowler_provider_kwargs(provider, mutelist_processor)
    prowler_provider_kwargs = get_prowler_provider_kwargs(provider)
    return prowler_provider(**prowler_provider_kwargs)

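Taken together, the two helpers above form a small factory: `return_prowler_provider` picks the class and `get_prowler_provider_kwargs` builds its constructor arguments. A minimal usage sketch of the newer two-argument signature (the `Provider` lookup and the `uid` value are hypothetical placeholders, not fixtures from this codebase):

    # Hedged sketch: assumes a persisted Provider row and an optional mutelist Processor.
    provider = Provider.objects.get(uid="123456789012")  # hypothetical lookup
    mutelist_processor = Processor.objects.filter(processor_type="mutelist").first()

    # Equivalent to calling initialize_prowler_provider(provider, mutelist_processor)
    prowler_provider_class = return_prowler_provider(provider)
    kwargs = get_prowler_provider_kwargs(provider, mutelist_processor)
    prowler_provider = prowler_provider_class(**kwargs)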
@@ -176,37 +147,6 @@ def prowler_provider_connection_test(provider: Provider) -> Connection:
    )


def prowler_integration_connection_test(integration: Integration) -> Connection:
    """Test the connection to a Prowler integration based on the given integration type.

    Args:
        integration (Integration): The integration object containing the integration type and associated credentials.

    Returns:
        Connection: A connection object representing the result of the connection test for the specified integration.
    """
    if integration.integration_type == Integration.IntegrationChoices.AMAZON_S3:
        return S3.test_connection(
            **integration.credentials,
            bucket_name=integration.configuration["bucket_name"],
            raise_on_exception=False,
        )
    # TODO: It may be possible to unify the connection test for all integrations, but that needs refactoring
    # to avoid code duplication. The AWS integrations are already similar, so SecurityHub and S3 could be unified with some changes in the SDK.
    elif (
        integration.integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB
    ):
        pass
    elif integration.integration_type == Integration.IntegrationChoices.JIRA:
        pass
    elif integration.integration_type == Integration.IntegrationChoices.SLACK:
        pass
    else:
        raise ValueError(
            f"Integration type {integration.integration_type} not supported"
        )

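As a usage sketch, a caller would branch on the returned `Connection`; note that only the S3 branch currently returns one, so a falsy result means the type is recognized but untested. The `is_connected` and `error` attribute names are an assumption about the prowler SDK's `Connection` model:

    import logging

    logger = logging.getLogger(__name__)

    connection = prowler_integration_connection_test(integration)  # integration: hypothetical instance
    if connection and not connection.is_connected:  # attribute names assumed from the prowler SDK
        logger.warning("Integration %s is not connected: %s", integration.id, connection.error)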
def validate_invitation(
    invitation_token: str, email: str, raise_not_found=False
) -> Invitation:
@@ -247,7 +187,7 @@ def validate_invitation(
        # Admin DB connector is used to bypass RLS protection since the invitation belongs to a tenant the user
        # is not a member of yet
        invitation = Invitation.objects.using(MainRouter.admin_db).get(
            token=invitation_token, email__iexact=email
            token=invitation_token, email=email
        )
    except Invitation.DoesNotExist:
        if raise_not_found:

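The `email__iexact` lookup in the hunk above is what makes the invitation match case-insensitive; on PostgreSQL a plain `email=` lookup is case-sensitive. A quick sketch of the difference (the token value is a hypothetical placeholder):

    token = "3f1c0a..."  # hypothetical invitation token
    Invitation.objects.get(token=token, email="User@Example.com")          # exact, case-sensitive match
    Invitation.objects.get(token=token, email__iexact="User@Example.com")  # also matches "user@example.com"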
@@ -1,16 +1,5 @@
from django.urls import reverse
from django_celery_results.models import TaskResult
from rest_framework import status
from rest_framework.response import Response

from api.exceptions import (
    TaskFailedException,
    TaskInProgressException,
    TaskNotFoundException,
)
from api.models import StateChoices, Task
from api.v1.serializers import TaskSerializer


class PaginateByPkMixin:
    """
@@ -24,211 +13,21 @@ class PaginateByPkMixin:
        request,  # noqa: F841
        base_queryset,
        manager,
        select_related: list | None = None,
        prefetch_related: list | None = None,
        select_related: list[str] | None = None,
        prefetch_related: list[str] | None = None,
    ) -> Response:
        """
        Paginate a queryset by primary key.

        This method is useful when you want to paginate a queryset that has been
        filtered or annotated in a way that would be lost if you used the default
        pagination method.
        """
        pk_list = base_queryset.values_list("id", flat=True)
        page = self.paginate_queryset(pk_list)
        if page is None:
            return Response(self.get_serializer(base_queryset, many=True).data)

        queryset = manager.filter(id__in=page)

        if select_related:
            queryset = queryset.select_related(*select_related)
        if prefetch_related:
            queryset = queryset.prefetch_related(*prefetch_related)

        # Optimize tags loading, if applicable
        if hasattr(self, "_optimize_tags_loading"):
            queryset = self._optimize_tags_loading(queryset)

        queryset = sorted(queryset, key=lambda obj: page.index(obj.id))

        serialized = self.get_serializer(queryset, many=True).data
        return self.get_paginated_response(serialized)

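A hedged sketch of how a ViewSet might call `paginate_by_pk` from its `list` method. Only the primary keys are paginated, then the full rows are re-fetched with the requested select/prefetch, so annotations on `base_queryset` are not lost. The model and manager names here are illustrative:

    from rest_framework import viewsets

    class ResourceViewSet(PaginateByPkMixin, viewsets.ReadOnlyModelViewSet):
        def list(self, request, *args, **kwargs):
            base_queryset = self.filter_queryset(self.get_queryset())
            return self.paginate_by_pk(
                request,
                base_queryset,
                manager=Resource.all_objects,  # hypothetical manager name
                select_related=["provider"],
                prefetch_related=["tags"],
            )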
class TaskManagementMixin:
    """
    Mixin to manage task status checking.

    This mixin provides functionality to check if a task with specific parameters
    is running, completed, failed, or doesn't exist. It returns the task when running
    and raises specific exceptions for failed/not found scenarios that can be handled
    at the view level.
    """

    def check_task_status(
        self,
        task_name: str,
        task_kwargs: dict,
        raise_on_failed: bool = True,
        raise_on_not_found: bool = True,
    ) -> Task | None:
        """
        Check the status of a task with the given name and kwargs.

        This method first checks for a related Task object, and if not found,
        checks TaskResult directly. If a TaskResult is found and running but
        there's no related Task, it raises TaskInProgressException.

        Args:
            task_name (str): The name of the task to check
            task_kwargs (dict): The kwargs to match against the task
            raise_on_failed (bool): Whether to raise an exception if the task failed
            raise_on_not_found (bool): Whether to raise an exception if the task is not found

        Returns:
            Task | None: The task instance if found (regardless of state), None if not found and raise_on_not_found=False

        Raises:
            TaskFailedException: If the task failed and raise_on_failed=True
            TaskNotFoundException: If the task is not found and raise_on_not_found=True
            TaskInProgressException: If the task is running but no related Task object exists
        """
        # First, try to find a Task object with a related TaskResult
        try:
            # Build the filter for task kwargs
            task_filter = {
                "task_runner_task__task_name": task_name,
            }

            # Add kwargs filters - we need to check if the task kwargs contain our parameters
            for key, value in task_kwargs.items():
                task_filter["task_runner_task__task_kwargs__contains"] = str(value)

            task = (
                Task.objects.filter(**task_filter)
                .select_related("task_runner_task")
                .order_by("-inserted_at")
                .first()
            )

            if task:
                # Get the task state using the same logic as TaskSerializer
                task_state_mapping = {
                    "PENDING": StateChoices.AVAILABLE,
                    "STARTED": StateChoices.EXECUTING,
                    "PROGRESS": StateChoices.EXECUTING,
                    "SUCCESS": StateChoices.COMPLETED,
                    "FAILURE": StateChoices.FAILED,
                    "REVOKED": StateChoices.CANCELLED,
                }

                celery_status = (
                    task.task_runner_task.status if task.task_runner_task else None
                )
                task_state = task_state_mapping.get(
                    celery_status or "", StateChoices.AVAILABLE
                )

                # Check the task state and raise exceptions accordingly
                if task_state in (StateChoices.FAILED, StateChoices.CANCELLED):
                    if raise_on_failed:
                        raise TaskFailedException(task=task)
                    return task
                elif task_state == StateChoices.COMPLETED:
                    return None

                return task

        except Task.DoesNotExist:
            pass

        # If no Task is found, check TaskResult directly
        try:
            # Build the filter for TaskResult
            task_result_filter = {
                "task_name": task_name,
            }

            # Add kwargs filters - check if the task kwargs contain our parameters
            for key, value in task_kwargs.items():
                task_result_filter["task_kwargs__contains"] = str(value)

            task_result = (
                TaskResult.objects.filter(**task_result_filter)
                .order_by("-date_created")
                .first()
            )

            if task_result:
                # Check if the TaskResult indicates a running task
                if task_result.status in ["PENDING", "STARTED", "PROGRESS"]:
                    # The task is running but no related Task object exists
                    raise TaskInProgressException(task_result=task_result)
                elif task_result.status == "FAILURE":
                    if raise_on_failed:
                        raise TaskFailedException(task=None)
                # For other statuses (SUCCESS, REVOKED), we don't have a Task to return,
                # so we treat it as not found

        except TaskResult.DoesNotExist:
            pass

        # No task found at all
        if raise_on_not_found:
            raise TaskNotFoundException()
        return None

    def get_task_response_if_running(
        self,
        task_name: str,
        task_kwargs: dict,
        raise_on_failed: bool = True,
        raise_on_not_found: bool = True,
    ) -> Response | None:
        """
        Get a 202 response with task details if the task is currently running.

        This method is useful for endpoints that should return the task status when
        a background task is in progress, similar to the compliance overview endpoints.

        Args:
            task_name (str): The name of the task to check
            task_kwargs (dict): The kwargs to match against the task

        Returns:
            Response | None: A 202 response with task details if running, None otherwise
        """
        task = self.check_task_status(
            task_name=task_name,
            task_kwargs=task_kwargs,
            raise_on_failed=raise_on_failed,
            raise_on_not_found=raise_on_not_found,
        )

        if not task:
            return None

        # Get the task state
        task_state_mapping = {
            "PENDING": StateChoices.AVAILABLE,
            "STARTED": StateChoices.EXECUTING,
            "PROGRESS": StateChoices.EXECUTING,
            "SUCCESS": StateChoices.COMPLETED,
            "FAILURE": StateChoices.FAILED,
            "REVOKED": StateChoices.CANCELLED,
        }

        celery_status = task.task_runner_task.status if task.task_runner_task else None
        task_state = task_state_mapping.get(celery_status or "", StateChoices.AVAILABLE)

        if task_state == StateChoices.EXECUTING:
            self.response_serializer_class = TaskSerializer
            serializer = TaskSerializer(task)
            return Response(
                data=serializer.data,
                status=status.HTTP_202_ACCEPTED,
                headers={
                    "Content-Location": reverse("task-detail", kwargs={"pk": task.id})
                },
            )

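A sketch of the intended call pattern from a view: short-circuit the list endpoint with a 202 and a Content-Location header while a background task is still executing. The view class and the task name string are hypothetical:

    from rest_framework import viewsets

    class ComplianceOverviewViewSet(TaskManagementMixin, viewsets.ReadOnlyModelViewSet):
        def list(self, request, *args, **kwargs):
            running_response = self.get_task_response_if_running(
                task_name="backfill-compliance-overviews",  # hypothetical task name
                task_kwargs={"scan_id": request.query_params.get("filter[scan_id]")},
                raise_on_not_found=False,
            )
            if running_response is not None:
                return running_response
            return super().list(request, *args, **kwargs)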
@@ -1,23 +0,0 @@
import yaml
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError


class BaseValidateSerializer(serializers.Serializer):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


class YamlOrJsonField(serializers.JSONField):
    def to_internal_value(self, data):
        if isinstance(data, str):
            try:
                data = yaml.safe_load(data)
            except yaml.YAMLError as exc:
                raise serializers.ValidationError("Invalid YAML format") from exc
        return super().to_internal_value(data)
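`YamlOrJsonField` accepts either an already-parsed object or a YAML/JSON string, parsing strings before the normal JSONField validation. A quick sketch, assuming DRF's standalone field validation flow:

    field = YamlOrJsonField()
    field.to_internal_value({"Mutelist": {}})             # dict passes straight through
    field.to_internal_value("Mutelist:\n  Accounts: {}")  # YAML string is parsed first
    field.to_internal_value("{invalid: [yaml")            # raises ValidationError("Invalid YAML format")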
@@ -1,52 +1,24 @@
import os
import re

from drf_spectacular.utils import extend_schema_field
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError

from api.v1.serializer_utils.base import BaseValidateSerializer

class BaseValidateSerializer(serializers.Serializer):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


# Integrations


class S3ConfigSerializer(BaseValidateSerializer):
    bucket_name = serializers.CharField()
    output_directory = serializers.CharField(allow_blank=True)

    def validate_output_directory(self, value):
        """
        Validate the output_directory field to ensure it's a properly formatted path.
        Prevents paths with excessive slashes like "///////test".
        If empty, sets a default value.
        """
        # If empty or None, set default value
        if not value:
            return "output"

        # Normalize the path to remove excessive slashes
        normalized_path = os.path.normpath(value)

        # Remove leading slashes for S3 paths
        if normalized_path.startswith("/"):
            normalized_path = normalized_path.lstrip("/")

        # Check for invalid characters or patterns
        if re.search(r'[<>:"|?*]', normalized_path):
            raise serializers.ValidationError(
                'Output directory contains invalid characters. Avoid: < > : " | ? *'
            )

        # Check for empty path after normalization
        if not normalized_path or normalized_path == ".":
            raise serializers.ValidationError(
                "Output directory cannot be empty or just '.' or '/'."
            )

        # Check for paths that are too long (S3 key limit is 1024 characters, leave some room for filename)
        if len(normalized_path) > 900:
            raise serializers.ValidationError(
                "Output directory path is too long (max 900 characters)."
            )

        return normalized_path
    output_directory = serializers.CharField()

    class Meta:
        resource_name = "integrations"
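A few worked inputs for `validate_output_directory` under the normalization rules above (a sketch of DRF's validation flow, not test fixtures from this repo):

    s = S3ConfigSerializer(data={"bucket_name": "my-bucket", "output_directory": "///////test"})
    s.is_valid()                           # True; os.path.normpath + lstrip("/") yield "test"
    s.validated_data["output_directory"]   # "test"

    S3ConfigSerializer(data={"bucket_name": "b", "output_directory": ""}).is_valid()     # True -> defaults to "output"
    S3ConfigSerializer(data={"bucket_name": "b", "output_directory": "a<b"}).is_valid()  # False: invalid character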
@@ -138,13 +110,10 @@ class IntegrationCredentialField(serializers.JSONField):
                },
                "output_directory": {
                    "type": "string",
                    "description": 'The directory path within the bucket where files will be saved. Optional - defaults to "output" if not provided. Path will be normalized to remove excessive slashes and invalid characters are not allowed (< > : " | ? *). Maximum length is 900 characters.',
                    "maxLength": 900,
                    "pattern": '^[^<>:"|?*]+$',
                    "default": "output",
                    "description": "The directory path within the bucket where files will be saved.",
                },
            },
            "required": ["bucket_name"],
            "required": ["bucket_name", "output_directory"],
        },
    ]
}

@@ -1,21 +0,0 @@
from drf_spectacular.utils import extend_schema_field

from api.v1.serializer_utils.base import YamlOrJsonField

from prowler.lib.mutelist.mutelist import mutelist_schema


@extend_schema_field(
    {
        "oneOf": [
            {
                "type": "object",
                "title": "Mutelist",
                "properties": {"Mutelist": mutelist_schema},
                "additionalProperties": False,
            },
        ]
    }
)
class ProcessorConfigField(YamlOrJsonField):
    pass
@@ -119,9 +119,9 @@ from rest_framework_json_api import serializers
                "type": "email",
                "description": "User microsoft email address.",
            },
            "password": {
            "encrypted_password": {
                "type": "string",
                "description": "User password.",
                "description": "User encrypted password.",
            },
        },
        "required": [
@@ -129,7 +129,7 @@ from rest_framework_json_api import serializers
            "client_secret",
            "tenant_id",
            "user",
            "password",
            "encrypted_password",
        ],
    },
    {
@@ -154,17 +154,6 @@ from rest_framework_json_api import serializers
        },
        "required": ["client_id", "client_secret", "refresh_token"],
    },
    {
        "type": "object",
        "title": "GCP Service Account Key",
        "properties": {
            "service_account_key": {
                "type": "object",
                "description": "The service account key for GCP.",
            }
        },
        "required": ["service_account_key"],
    },
    {
        "type": "object",
        "title": "Kubernetes Static Credentials",
@@ -176,43 +165,6 @@ from rest_framework_json_api import serializers
        },
        "required": ["kubeconfig_content"],
    },
    {
        "type": "object",
        "title": "GitHub Personal Access Token",
        "properties": {
            "personal_access_token": {
                "type": "string",
                "description": "GitHub personal access token for authentication.",
            }
        },
        "required": ["personal_access_token"],
    },
    {
        "type": "object",
        "title": "GitHub OAuth App Token",
        "properties": {
            "oauth_app_token": {
                "type": "string",
                "description": "GitHub OAuth App token for authentication.",
            }
        },
        "required": ["oauth_app_token"],
    },
    {
        "type": "object",
        "title": "GitHub App Credentials",
        "properties": {
            "github_app_id": {
                "type": "integer",
                "description": "GitHub App ID for authentication.",
            },
            "github_app_key": {
                "type": "string",
                "description": "Path to the GitHub App private key file.",
            },
        },
        "required": ["github_app_id", "github_app_key"],
    },
]
}
)

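For reference, a payload matching the M365 schema above after the rename from `password` to `encrypted_password` (all values are placeholders):

    m365_secret = {
        "client_id": "00000000-0000-0000-0000-000000000000",
        "client_secret": "<app secret>",
        "tenant_id": "00000000-0000-0000-0000-000000000000",
        "user": "admin@contoso.onmicrosoft.com",
        "encrypted_password": "<encrypted password blob>",  # renamed from "password" in this diff
    }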
@@ -7,23 +7,20 @@ from django.contrib.auth.models import update_last_login
from django.contrib.auth.password_validation import validate_password
from drf_spectacular.utils import extend_schema_field
from jwt.exceptions import InvalidKeyError
from rest_framework.validators import UniqueTogetherValidator
from rest_framework_json_api import serializers
from rest_framework_json_api.relations import SerializerMethodResourceRelatedField
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework_simplejwt.tokens import RefreshToken

from api.models import (
    ComplianceOverview,
    Finding,
    Integration,
    IntegrationProviderRelationship,
    Invitation,
    InvitationRoleRelationship,
    LighthouseConfiguration,
    Membership,
    Processor,
    Provider,
    ProviderGroup,
    ProviderGroupMembership,
@@ -32,10 +29,8 @@ from api.models import (
    ResourceTag,
    Role,
    RoleProviderGroupRelationship,
    SAMLConfiguration,
    Scan,
    StateChoices,
    StatusChoices,
    Task,
    User,
    UserRoleRelationship,
@@ -47,9 +42,7 @@ from api.v1.serializer_utils.integrations import (
    IntegrationCredentialField,
    S3ConfigSerializer,
)
from api.v1.serializer_utils.processors import ProcessorConfigField
from api.v1.serializer_utils.providers import ProviderSecretField
from prowler.lib.mutelist.mutelist import Mutelist

# Tokens

@@ -135,12 +128,6 @@ class TokenSerializer(BaseTokenSerializer):

class TokenSocialLoginSerializer(BaseTokenSerializer):
    email = serializers.EmailField(write_only=True)
    tenant_id = serializers.UUIDField(
        write_only=True,
        required=False,
        help_text="If not provided, the tenant ID of the first membership that was added"
        " to the user will be used.",
    )

    # Output tokens
    refresh = serializers.CharField(read_only=True)
@@ -862,7 +849,6 @@ class ScanSerializer(RLSSerializer):
            "completed_at",
            "scheduled_at",
            "next_scan_at",
            "processor",
            "url",
        ]

@@ -1000,12 +986,8 @@ class ResourceSerializer(RLSSerializer):

    tags = serializers.SerializerMethodField()
    type_ = serializers.CharField(read_only=True)
    failed_findings_count = serializers.IntegerField(read_only=True)

    findings = SerializerMethodResourceRelatedField(
        many=True,
        read_only=True,
    )
    findings = serializers.ResourceRelatedField(many=True, read_only=True)

    class Meta:
        model = Resource
@@ -1021,7 +1003,6 @@ class ResourceSerializer(RLSSerializer):
            "tags",
            "provider",
            "findings",
            "failed_findings_count",
            "url",
        ]
        extra_kwargs = {
@@ -1031,8 +1012,8 @@ class ResourceSerializer(RLSSerializer):
        }

    included_serializers = {
        "findings": "api.v1.serializers.FindingIncludeSerializer",
        "provider": "api.v1.serializers.ProviderIncludeSerializer",
        "findings": "api.v1.serializers.FindingSerializer",
        "provider": "api.v1.serializers.ProviderSerializer",
    }

    @extend_schema_field(
@@ -1043,10 +1024,6 @@ class ResourceSerializer(RLSSerializer):
        }
    )
    def get_tags(self, obj):
        # Use prefetched tags if available to avoid N+1 queries
        if hasattr(obj, "prefetched_tags"):
            return {tag.key: tag.value for tag in obj.prefetched_tags}
        # Fallback to the original method if prefetch is not available
        return obj.get_tags(self.context.get("tenant_id"))

    def get_fields(self):
@@ -1056,17 +1033,10 @@ class ResourceSerializer(RLSSerializer):
        fields["type"] = type_
        return fields

    def get_findings(self, obj):
        return (
            obj.latest_findings
            if hasattr(obj, "latest_findings")
            else obj.findings.all()
        )

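Both `get_tags` implementations look for an `obj.prefetched_tags` attribute before falling back to a per-object query. A hedged sketch of the queryset side that would populate it (the relation name on `Resource` is an assumption):

    from django.db.models import Prefetch

    queryset = queryset.prefetch_related(
        Prefetch(
            "tags",                     # assumed relation name on Resource
            queryset=ResourceTag.objects.only("key", "value"),
            to_attr="prefetched_tags",  # matches the hasattr() check in get_tags
        )
    )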
class ResourceIncludeSerializer(RLSSerializer):
    """
    Serializer for the included Resource model.
    Serializer for the Resource model.
    """

    tags = serializers.SerializerMethodField()
@@ -1099,10 +1069,6 @@ class ResourceIncludeSerializer(RLSSerializer):
        }
    )
    def get_tags(self, obj):
        # Use prefetched tags if available to avoid N+1 queries
        if hasattr(obj, "prefetched_tags"):
            return {tag.key: tag.value for tag in obj.prefetched_tags}
        # Fallback to the original method if prefetch is not available
        return obj.get_tags(self.context.get("tenant_id"))

    def get_fields(self):
@@ -1113,17 +1079,6 @@ class ResourceIncludeSerializer(RLSSerializer):
        return fields


class ResourceMetadataSerializer(serializers.Serializer):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    types = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    # Temporarily disabled until we implement tag filtering in the UI
    # tags = serializers.JSONField(help_text="Tags are described as key-value pairs.")

    class Meta:
        resource_name = "resources-metadata"


class FindingSerializer(RLSSerializer):
    """
    Serializer for the Finding model.
@@ -1147,7 +1102,6 @@ class FindingSerializer(RLSSerializer):
            "updated_at",
            "first_seen_at",
            "muted",
            "muted_reason",
            "url",
            # Relationships
            "scan",
@@ -1160,28 +1114,6 @@ class FindingSerializer(RLSSerializer):
    }


class FindingIncludeSerializer(RLSSerializer):
    """
    Serializer for the included Finding model.
    """

    class Meta:
        model = Finding
        fields = [
            "id",
            "uid",
            "status",
            "severity",
            "check_id",
            "check_metadata",
            "inserted_at",
            "updated_at",
            "first_seen_at",
            "muted",
            "muted_reason",
        ]


# To be removed when the related endpoint is removed as well
class FindingDynamicFilterSerializer(serializers.Serializer):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
@@ -1217,8 +1149,6 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
            serializer = AzureProviderSecret(data=secret)
        elif provider_type == Provider.ProviderChoices.GCP.value:
            serializer = GCPProviderSecret(data=secret)
        elif provider_type == Provider.ProviderChoices.GITHUB.value:
            serializer = GithubProviderSecret(data=secret)
        elif provider_type == Provider.ProviderChoices.KUBERNETES.value:
            serializer = KubernetesProviderSecret(data=secret)
        elif provider_type == Provider.ProviderChoices.M365.value:
@@ -1229,8 +1159,6 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
            )
        elif secret_type == ProviderSecret.TypeChoices.ROLE:
            serializer = AWSRoleAssumptionProviderSecret(data=secret)
        elif secret_type == ProviderSecret.TypeChoices.SERVICE_ACCOUNT:
            serializer = GCPServiceAccountProviderSecret(data=secret)
        else:
            raise serializers.ValidationError(
                {"secret_type": f"Secret type not supported: {secret_type}"}
@@ -1268,8 +1196,8 @@ class M365ProviderSecret(serializers.Serializer):
    client_id = serializers.CharField()
    client_secret = serializers.CharField()
    tenant_id = serializers.CharField()
    user = serializers.EmailField(required=False)
    password = serializers.CharField(required=False)
    user = serializers.EmailField()
    encrypted_password = serializers.CharField()

    class Meta:
        resource_name = "provider-secrets"
@@ -1284,13 +1212,6 @@ class GCPProviderSecret(serializers.Serializer):
        resource_name = "provider-secrets"


class GCPServiceAccountProviderSecret(serializers.Serializer):
    service_account_key = serializers.JSONField()

    class Meta:
        resource_name = "provider-secrets"


class KubernetesProviderSecret(serializers.Serializer):
    kubeconfig_content = serializers.CharField()

@@ -1298,16 +1219,6 @@ class KubernetesProviderSecret(serializers.Serializer):
        resource_name = "provider-secrets"


class GithubProviderSecret(serializers.Serializer):
    personal_access_token = serializers.CharField(required=False)
    oauth_app_token = serializers.CharField(required=False)
    github_app_id = serializers.IntegerField(required=False)
    github_app_key_content = serializers.CharField(required=False)

    class Meta:
        resource_name = "provider-secrets"


class AWSRoleAssumptionProviderSecret(serializers.Serializer):
    role_arn = serializers.CharField()
    external_id = serializers.CharField()
@@ -1387,13 +1298,12 @@ class ProviderSecretUpdateSerializer(BaseWriteProviderSecretSerializer):
        "inserted_at": {"read_only": True},
        "updated_at": {"read_only": True},
        "provider": {"read_only": True},
        "secret_type": {"required": False},
        "secret_type": {"read_only": True},
    }

    def validate(self, attrs):
        provider = self.instance.provider
        # To allow updating a secret with the same type without making the `secret_type` mandatory
        secret_type = attrs.get("secret_type") or self.instance.secret_type
        secret_type = self.instance.secret_type
        secret = attrs.get("secret")

        validated_attrs = super().validate(attrs)
@@ -1760,63 +1670,130 @@ class RoleProviderGroupRelationshipSerializer(RLSSerializer, BaseWriteSerializer
# Compliance overview


class ComplianceOverviewSerializer(serializers.Serializer):
class ComplianceOverviewSerializer(RLSSerializer):
    """
    Serializer for compliance requirement status aggregated by compliance framework.

    This serializer is used to format aggregated compliance framework data,
    providing counts of passed, failed, and manual requirements along with
    an overall global status for each framework.
    Serializer for the ComplianceOverview model.
    """

    # Add ID field which will be used for resource identification
    id = serializers.CharField()
    framework = serializers.CharField()
    version = serializers.CharField()
    requirements_passed = serializers.IntegerField()
    requirements_failed = serializers.IntegerField()
    requirements_manual = serializers.IntegerField()
    total_requirements = serializers.IntegerField()
    requirements_status = serializers.SerializerMethodField(
        read_only=True, method_name="get_requirements_status"
    )
    provider_type = serializers.SerializerMethodField(read_only=True)

    class JSONAPIMeta:
        resource_name = "compliance-overviews"
    class Meta:
        model = ComplianceOverview
        fields = [
            "id",
            "inserted_at",
            "compliance_id",
            "framework",
            "version",
            "requirements_status",
            "region",
            "provider_type",
            "scan",
            "url",
        ]

    @extend_schema_field(
        {
            "type": "object",
            "properties": {
                "passed": {"type": "integer"},
                "failed": {"type": "integer"},
                "manual": {"type": "integer"},
                "total": {"type": "integer"},
            },
        }
    )
    def get_requirements_status(self, obj):
        return {
            "passed": obj.requirements_passed,
            "failed": obj.requirements_failed,
            "manual": obj.requirements_manual,
            "total": obj.total_requirements,
        }

    @extend_schema_field(serializers.CharField(allow_null=True))
    def get_provider_type(self, obj):
        """
        Retrieves the provider_type from scan.provider.provider_type.
        """
        try:
            return obj.scan.provider.provider
        except AttributeError:
            return None


class ComplianceOverviewDetailSerializer(serializers.Serializer):
    """
    Serializer for detailed compliance requirement information.
class ComplianceOverviewFullSerializer(ComplianceOverviewSerializer):
    requirements = serializers.SerializerMethodField(read_only=True)

    This serializer formats the aggregated requirement data, showing detailed status
    and counts for each requirement across all regions.
    """
    class Meta(ComplianceOverviewSerializer.Meta):
        fields = ComplianceOverviewSerializer.Meta.fields + [
            "description",
            "requirements",
        ]

    id = serializers.CharField()
    framework = serializers.CharField()
    version = serializers.CharField()
    description = serializers.CharField()
    status = serializers.ChoiceField(choices=StatusChoices.choices)

    class JSONAPIMeta:
        resource_name = "compliance-requirements-details"


class ComplianceOverviewAttributesSerializer(serializers.Serializer):
    id = serializers.CharField()
    framework_description = serializers.CharField()
    name = serializers.CharField()
    framework = serializers.CharField()
    version = serializers.CharField()
    description = serializers.CharField()
    attributes = serializers.JSONField()

    class JSONAPIMeta:
        resource_name = "compliance-requirements-attributes"
    @extend_schema_field(
        {
            "type": "object",
            "properties": {
                "requirement_id": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "checks": {
                            "type": "object",
                            "properties": {
                                "check_name": {
                                    "type": "object",
                                    "properties": {
                                        "status": {
                                            "type": "string",
                                            "enum": ["PASS", "FAIL", None],
                                        },
                                    },
                                }
                            },
                            "description": "Each key in the 'checks' object is a check name, with values as "
                            "'PASS', 'FAIL', or null.",
                        },
                        "status": {
                            "type": "string",
                            "enum": ["PASS", "FAIL", "MANUAL"],
                        },
                        "attributes": {
                            "type": "array",
                            "items": {
                                "type": "object",
                            },
                        },
                        "description": {"type": "string"},
                        "checks_status": {
                            "type": "object",
                            "properties": {
                                "total": {"type": "integer"},
                                "pass": {"type": "integer"},
                                "fail": {"type": "integer"},
                                "manual": {"type": "integer"},
                            },
                        },
                    },
                }
            },
        }
    )
    def get_requirements(self, obj):
        """
        Returns the detailed structure of requirements.
        """
        return obj.requirements


class ComplianceOverviewMetadataSerializer(serializers.Serializer):
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)

    class JSONAPIMeta:
    class Meta:
        resource_name = "compliance-overviews-metadata"

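With the model-backed serializer above, `requirements_status` collapses the four counter columns into a single object; a sample rendering with illustrative values:

    # For an overview row with 12 passed, 3 failed, and 1 manual requirement:
    serializer.data["requirements_status"]
    # -> {"passed": 12, "failed": 3, "manual": 1, "total": 16}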
@@ -1950,16 +1927,6 @@ class ScheduleDailyCreateSerializer(serializers.Serializer):


class BaseWriteIntegrationSerializer(BaseWriteSerializer):
    def validate(self, attrs):
        if Integration.objects.filter(
            configuration=attrs.get("configuration")
        ).exists():
            raise serializers.ValidationError(
                {"name": "This integration already exists."}
            )

        return super().validate(attrs)

    @staticmethod
    def validate_integration_data(
        integration_type: str,
@@ -1967,7 +1934,7 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
        configuration: dict,
        credentials: dict,
    ):
        if integration_type == Integration.IntegrationChoices.AMAZON_S3:
        if integration_type == Integration.IntegrationChoices.S3:
            config_serializer = S3ConfigSerializer
            credentials_serializers = [AWSCredentialSerializer]
        # TODO: This will be required for AWS Security Hub
@@ -1985,11 +1952,7 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
            }
        )

        serializer_instance = config_serializer(data=configuration)
        serializer_instance.is_valid(raise_exception=True)

        # Apply the validated (and potentially transformed) data back to configuration
        configuration.update(serializer_instance.validated_data)
        config_serializer(data=configuration).is_valid(raise_exception=True)

        for cred_serializer in credentials_serializers:
            try:
@@ -2068,6 +2031,7 @@ class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
        "inserted_at": {"read_only": True},
        "updated_at": {"read_only": True},
        "connected": {"read_only": True},
        "enabled": {"read_only": True},
        "connection_last_checked_at": {"read_only": True},
    }

@@ -2077,10 +2041,10 @@ class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
        configuration = attrs.get("configuration")
        credentials = attrs.get("credentials")

        validated_attrs = super().validate(attrs)
        self.validate_integration_data(
            integration_type, providers, configuration, credentials
        )
        validated_attrs = super().validate(attrs)
        return validated_attrs

    def create(self, validated_data):
@@ -2131,7 +2095,6 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
    }

    def validate(self, attrs):
        super().validate(attrs)
        integration_type = self.instance.integration_type
        providers = attrs.get("providers")
        configuration = attrs.get("configuration") or self.instance.configuration
@@ -2156,278 +2119,3 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
            IntegrationProviderRelationship.objects.bulk_create(new_relationships)

        return super().update(instance, validated_data)

# Processors


class ProcessorSerializer(RLSSerializer):
    """
    Serializer for the Processor model.
    """

    configuration = ProcessorConfigField()

    class Meta:
        model = Processor
        fields = [
            "id",
            "inserted_at",
            "updated_at",
            "processor_type",
            "configuration",
            "url",
        ]


class ProcessorCreateSerializer(RLSSerializer, BaseWriteSerializer):
    configuration = ProcessorConfigField(required=True)

    class Meta:
        model = Processor
        fields = [
            "inserted_at",
            "updated_at",
            "processor_type",
            "configuration",
        ]
        extra_kwargs = {
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
        }
        validators = [
            UniqueTogetherValidator(
                queryset=Processor.objects.all(),
                fields=["processor_type"],
                message="A processor with the same type already exists.",
            )
        ]

    def validate(self, attrs):
        validated_attrs = super().validate(attrs)
        self.validate_processor_data(attrs)
        return validated_attrs

    def validate_processor_data(self, attrs):
        processor_type = attrs.get("processor_type")
        configuration = attrs.get("configuration")
        if processor_type == "mutelist":
            self.validate_mutelist_configuration(configuration)

    def validate_mutelist_configuration(self, configuration):
        if not isinstance(configuration, dict):
            raise serializers.ValidationError("Invalid Mutelist configuration.")

        mutelist_configuration = configuration.get("Mutelist", {})

        if not mutelist_configuration:
            raise serializers.ValidationError(
                "Invalid Mutelist configuration: 'Mutelist' is a required property."
            )

        try:
            Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
            return
        except Exception as error:
            raise serializers.ValidationError(
                f"Invalid Mutelist configuration: {error}"
            )


class ProcessorUpdateSerializer(BaseWriteSerializer):
    configuration = ProcessorConfigField(required=True)

    class Meta:
        model = Processor
        fields = [
            "inserted_at",
            "updated_at",
            "configuration",
        ]
        extra_kwargs = {
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
        }

    def validate(self, attrs):
        validated_attrs = super().validate(attrs)
        self.validate_processor_data(attrs)
        return validated_attrs

    def validate_processor_data(self, attrs):
        processor_type = self.instance.processor_type
        configuration = attrs.get("configuration")
        if processor_type == "mutelist":
            self.validate_mutelist_configuration(configuration)

    def validate_mutelist_configuration(self, configuration):
        if not isinstance(configuration, dict):
            raise serializers.ValidationError("Invalid Mutelist configuration.")

        mutelist_configuration = configuration.get("Mutelist", {})

        if not mutelist_configuration:
            raise serializers.ValidationError(
                "Invalid Mutelist configuration: 'Mutelist' is a required property."
            )

        try:
            Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
            return
        except Exception as error:
            raise serializers.ValidationError(
                f"Invalid Mutelist configuration: {error}"
            )

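A minimal configuration that should pass `validate_mutelist_configuration`, sketched as the Python dict that `ProcessorConfigField` would produce from YAML or JSON input. The check name is illustrative, and the nested shape follows Prowler's documented mutelist format:

    configuration = {
        "Mutelist": {
            "Accounts": {
                "*": {
                    "Checks": {
                        "s3_bucket_public_access": {  # illustrative check name
                            "Regions": ["*"],
                            "Resources": ["*"],
                        }
                    }
                }
            }
        }
    }
    serializer = ProcessorCreateSerializer(
        data={"processor_type": "mutelist", "configuration": configuration}
    )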
# SSO


class SamlInitiateSerializer(serializers.Serializer):
    email_domain = serializers.CharField()

    class JSONAPIMeta:
        resource_name = "saml-initiate"


class SamlMetadataSerializer(serializers.Serializer):
    class JSONAPIMeta:
        resource_name = "saml-meta"


class SAMLConfigurationSerializer(RLSSerializer):
    class Meta:
        model = SAMLConfiguration
        fields = ["id", "email_domain", "metadata_xml", "created_at", "updated_at"]
        read_only_fields = ["id", "created_at", "updated_at"]


class LighthouseConfigSerializer(RLSSerializer):
    """
    Serializer for the LighthouseConfig model.
    """

    api_key = serializers.CharField(required=False)

    class Meta:
        model = LighthouseConfiguration
        fields = [
            "id",
            "name",
            "api_key",
            "model",
            "temperature",
            "max_tokens",
            "business_context",
            "is_active",
            "inserted_at",
            "updated_at",
            "url",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "is_active": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
        }

    def to_representation(self, instance):
        data = super().to_representation(instance)
        # Check if api_key is specifically requested in fields param
        fields_param = self.context.get("request", None) and self.context[
            "request"
        ].query_params.get("fields[lighthouse-config]", "")
        if fields_param == "api_key":
            # Return decrypted key if specifically requested
            data["api_key"] = instance.api_key_decoded if instance.api_key else None
        else:
            # Return masked key for general requests
            data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
        return data


class LighthouseConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
    """Serializer for creating new Lighthouse configurations."""

    api_key = serializers.CharField(write_only=True, required=True)

    class Meta:
        model = LighthouseConfiguration
        fields = [
            "id",
            "name",
            "api_key",
            "model",
            "temperature",
            "max_tokens",
            "business_context",
            "is_active",
            "inserted_at",
            "updated_at",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "is_active": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
        }

    def validate(self, attrs):
        tenant_id = self.context.get("request").tenant_id
        if LighthouseConfiguration.objects.filter(tenant_id=tenant_id).exists():
            raise serializers.ValidationError(
                {
                    "tenant_id": "Lighthouse configuration already exists for this tenant."
                }
            )
        return super().validate(attrs)

    def create(self, validated_data):
        api_key = validated_data.pop("api_key")
        instance = super().create(validated_data)
        instance.api_key_decoded = api_key
        instance.save()
        return instance

    def to_representation(self, instance):
        data = super().to_representation(instance)
        # Always mask the API key in the response
        data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
        return data


class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
    """
    Serializer for updating LighthouseConfig instances.
    """

    api_key = serializers.CharField(write_only=True, required=False)

    class Meta:
        model = LighthouseConfiguration
        fields = [
            "id",
            "name",
            "api_key",
            "model",
            "temperature",
            "max_tokens",
            "business_context",
            "is_active",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "is_active": {"read_only": True},
            "name": {"required": False},
            "model": {"required": False},
            "temperature": {"required": False},
            "max_tokens": {"required": False},
        }

    def update(self, instance, validated_data):
        api_key = validated_data.pop("api_key", None)
        instance = super().update(instance, validated_data)
        if api_key:
            instance.api_key_decoded = api_key
            instance.save()
        return instance

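The `to_representation` overrides above mean the API key is masked by default and only decoded when explicitly requested through a JSON:API sparse fieldset. Roughly, with the route registered below (the path prefix is an assumption):

    # GET /api/v1/lighthouse-configurations
    #   -> "api_key": "****************"  (masked)
    # GET /api/v1/lighthouse-configurations?fields[lighthouse-config]=api_key
    #   -> "api_key": "<decrypted value>"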
@@ -1,11 +1,9 @@
from allauth.socialaccount.providers.saml.views import ACSView, MetadataView, SLSView
from django.urls import include, path
from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers

from api.v1.views import (
    ComplianceOverviewViewSet,
    CustomSAMLLoginView,
    CustomTokenObtainView,
    CustomTokenRefreshView,
    CustomTokenSwitchTenantView,
@@ -15,10 +13,8 @@ from api.v1.views import (
    IntegrationViewSet,
    InvitationAcceptViewSet,
    InvitationViewSet,
    LighthouseConfigViewSet,
    MembershipViewSet,
    OverviewViewSet,
    ProcessorViewSet,
    ProviderGroupProvidersRelationshipView,
    ProviderGroupViewSet,
    ProviderSecretViewSet,
@@ -26,14 +22,10 @@ from api.v1.views import (
    ResourceViewSet,
    RoleProviderGroupRelationshipView,
    RoleViewSet,
    SAMLConfigurationViewSet,
    SAMLInitiateAPIView,
    SAMLTokenValidateView,
    ScanViewSet,
    ScheduleViewSet,
    SchemaView,
    TaskViewSet,
    TenantFinishACSView,
    TenantMembersViewSet,
    TenantViewSet,
    UserRoleRelationshipView,
@@ -57,13 +49,6 @@ router.register(
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")
router.register(r"integrations", IntegrationViewSet, basename="integration")
router.register(r"processors", ProcessorViewSet, basename="processor")
router.register(r"saml-config", SAMLConfigurationViewSet, basename="saml-config")
router.register(
    r"lighthouse-configurations",
    LighthouseConfigViewSet,
    basename="lighthouseconfiguration",
)

tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
@@ -127,36 +112,6 @@ urlpatterns = [
        ),
        name="provider_group-providers-relationship",
    ),
    # API endpoint to start SAML SSO flow
    path(
        "auth/saml/initiate/", SAMLInitiateAPIView.as_view(), name="api_saml_initiate"
    ),
    path(
        "accounts/saml/<organization_slug>/login/",
        CustomSAMLLoginView.as_view(),
        name="saml_login",
    ),
    path(
        "accounts/saml/<organization_slug>/acs/",
        ACSView.as_view(),
        name="saml_acs",
    ),
    path(
        "accounts/saml/<organization_slug>/acs/finish/",
        TenantFinishACSView.as_view(),
        name="saml_finish_acs",
    ),
    path(
        "accounts/saml/<organization_slug>/sls/",
        SLSView.as_view(),
        name="saml_sls",
    ),
    path(
        "accounts/saml/<organization_slug>/metadata/",
        MetadataView.as_view(),
        name="saml_metadata",
    ),
    path("tokens/saml", SAMLTokenValidateView.as_view(), name="token-saml"),
    path("tokens/google", GoogleSocialLoginView.as_view(), name="token-google"),
    path("tokens/github", GithubSocialLoginView.as_view(), name="token-github"),
    path("", include(router.urls)),

File diff suppressed because it is too large
@@ -1,5 +1,3 @@
import string

from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _

@@ -22,89 +20,3 @@ class MaximumLengthValidator:
        return _(
            f"Your password must contain no more than {self.max_length} characters."
        )


class SpecialCharactersValidator:
    def __init__(self, special_characters=None, min_special_characters=1):
        # Use string.punctuation if no custom characters provided
        self.special_characters = special_characters or string.punctuation
        self.min_special_characters = min_special_characters

    def validate(self, password, user=None):
        if (
            sum(1 for char in password if char in self.special_characters)
            < self.min_special_characters
        ):
            raise ValidationError(
                _("This password must contain at least one special character."),
                code="password_no_special_characters",
                params={
                    "special_characters": self.special_characters,
                    "min_special_characters": self.min_special_characters,
                },
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least one special character from: {self.special_characters}"
        )


class UppercaseValidator:
    def __init__(self, min_uppercase=1):
        self.min_uppercase = min_uppercase

    def validate(self, password, user=None):
        if sum(1 for char in password if char.isupper()) < self.min_uppercase:
            raise ValidationError(
                _(
                    "This password must contain at least %(min_uppercase)d uppercase letter."
                ),
                code="password_no_uppercase_letters",
                params={"min_uppercase": self.min_uppercase},
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least {self.min_uppercase} uppercase letter."
        )


class LowercaseValidator:
    def __init__(self, min_lowercase=1):
        self.min_lowercase = min_lowercase

    def validate(self, password, user=None):
        if sum(1 for char in password if char.islower()) < self.min_lowercase:
            raise ValidationError(
                _(
                    "This password must contain at least %(min_lowercase)d lowercase letter."
                ),
                code="password_no_lowercase_letters",
                params={"min_lowercase": self.min_lowercase},
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least {self.min_lowercase} lowercase letter."
        )


class NumericValidator:
    def __init__(self, min_numeric=1):
        self.min_numeric = min_numeric

    def validate(self, password, user=None):
        if sum(1 for char in password if char.isdigit()) < self.min_numeric:
            raise ValidationError(
                _(
                    "This password must contain at least %(min_numeric)d numeric character."
                ),
                code="password_no_numeric_characters",
                params={"min_numeric": self.min_numeric},
            )

    def get_help_text(self):
        return _(
            f"Your password must contain at least {self.min_numeric} numeric character."
        )

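These validators plug into Django's standard password validation machinery (they are registered in AUTH_PASSWORD_VALIDATORS in the settings diff below). A quick sketch of exercising them directly:

    from django.contrib.auth.password_validation import validate_password
    from django.core.exceptions import ValidationError

    try:
        validate_password("alllowercase")  # no uppercase, digit, or special character
    except ValidationError as exc:
        print(exc.messages)  # one message per failed validator configured in settings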
@@ -1,13 +1,6 @@
import warnings

from celery import Celery, Task
from config.env import env

# Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684
warnings.filterwarnings(
    "ignore", category=UserWarning, module="dj_rest_auth.registration.serializers"
)

BROKER_VISIBILITY_TIMEOUT = env.int("DJANGO_BROKER_VISIBILITY_TIMEOUT", default=86400)

celery_app = Celery("tasks")

@@ -10,8 +10,6 @@ from config.settings.social_login import * # noqa
SECRET_KEY = env("SECRET_KEY", default="secret")
DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
USE_X_FORWARDED_HOST = True

# Application definition

@@ -28,19 +26,16 @@ INSTALLED_APPS = [
    "rest_framework",
    "corsheaders",
    "drf_spectacular",
    "drf_spectacular_jsonapi",
    "django_guid",
    "rest_framework_json_api",
    "django_celery_results",
    "django_celery_beat",
    "rest_framework_simplejwt.token_blacklist",
    "allauth",
    "django.contrib.sites",
    "allauth.account",
    "allauth.socialaccount",
    "allauth.socialaccount.providers.google",
    "allauth.socialaccount.providers.github",
    "allauth.socialaccount.providers.saml",
    "dj_rest_auth.registration",
    "rest_framework.authtoken",
]
@@ -159,30 +154,6 @@ AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
    {
        "NAME": "api.validators.SpecialCharactersValidator",
        "OPTIONS": {
            "min_special_characters": 1,
        },
    },
    {
        "NAME": "api.validators.UppercaseValidator",
        "OPTIONS": {
            "min_uppercase": 1,
        },
    },
    {
        "NAME": "api.validators.LowercaseValidator",
        "OPTIONS": {
            "min_lowercase": 1,
        },
    },
    {
        "NAME": "api.validators.NumericValidator",
        "OPTIONS": {
            "min_numeric": 1,
        },
    },
]

SIMPLE_JWT = {
@@ -267,13 +238,8 @@ DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY = env.str(
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN = env.str("DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN", "")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION = env.str("DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION", "")

DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
# HTTP Security Headers
SECURE_CONTENT_TYPE_NOSNIFF = True
X_FRAME_OPTIONS = "DENY"
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"

DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)

# SAML requirement
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True

@@ -4,7 +4,6 @@ from config.env import env
IGNORED_EXCEPTIONS = [
    # Provider is not connected due to credentials errors
    "is not connected",
    "ProviderConnectionError",
    # Authentication Errors from AWS
    "InvalidToken",
    "AccessDeniedException",
@@ -17,7 +16,7 @@ IGNORED_EXCEPTIONS = [
    "InternalServerErrorException",
    "AccessDenied",
    "No Shodan API Key", # Shodan Check
    "RequestLimitExceeded", # For now, we don't want to log the RequestLimitExceeded errors
    "RequestLimitExceeded", # For now we don't want to log the RequestLimitExceeded errors
    "ThrottlingException",
    "Rate exceeded",
    "SubscriptionRequiredException",
@@ -43,9 +42,7 @@ IGNORED_EXCEPTIONS = [
    "AWSAccessKeyIDInvalidError",
    "AWSSessionTokenExpiredError",
    "EndpointConnectionError", # AWS Service is not available in a region
    # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an
    # unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
    "Pool is closed",
    "Pool is closed", # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
    # Authentication Errors from GCP
    "ClientAuthenticationError",
    "AuthorizationFailed",
@@ -69,15 +66,12 @@ IGNORED_EXCEPTIONS = [
    "AzureClientIdAndClientSecretNotBelongingToTenantIdError",
    "AzureHTTPResponseError",
    "Error with credentials provided",
    # PowerShell Errors in User Authentication
    "Microsoft Teams User Auth connection failed: Please check your permissions and try again.",
    "Exchange Online User Auth connection failed: Please check your permissions and try again.",
]


def before_send(event, hint):
    """
    before_send handles the Sentry events in order to send them or not
    before_send handles the Sentry events in order to sent them or not
    """
    # Ignore logs with the ignored_exceptions
    # https://docs.python.org/3/library/logging.html#logrecord-objects
@@ -85,16 +79,9 @@ def before_send(event, hint):
    log_msg = hint["log_record"].msg
    log_lvl = hint["log_record"].levelno

    # Handle Error and Critical events and discard the rest
    if log_lvl <= 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
        return None  # Explicitly return None to drop the event

    # Ignore exceptions with the ignored_exceptions
    if "exc_info" in hint and hint["exc_info"]:
        exc_value = str(hint["exc_info"][1])
        if any(ignored in exc_value for ignored in IGNORED_EXCEPTIONS):
            return None  # Explicitly return None to drop the event

    # Handle Error events and discard the rest
    if log_lvl == 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
        return
    return event

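For context, a minimal sketch (not from this diff) of how a before_send hook like the one above is typically registered with the Sentry SDK; the DSN value is a placeholder.

import sentry_sdk

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send=before_send,  # drops events whose message or exception matches IGNORED_EXCEPTIONS
)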
@@ -11,7 +11,8 @@ GITHUB_OAUTH_CALLBACK_URL = env("SOCIAL_GITHUB_OAUTH_CALLBACK_URL", default="")
# Allauth settings
ACCOUNT_LOGIN_METHODS = {"email"}  # Use Email / Password authentication
ACCOUNT_SIGNUP_FIELDS = ["email*", "password1*", "password2*"]
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "none"  # Do not require email confirmation
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
REST_AUTH = {
@@ -24,20 +25,6 @@ SOCIALACCOUNT_EMAIL_AUTHENTICATION = True
# Connect local account and social account if local account with that email address already exists
SOCIALACCOUNT_EMAIL_AUTHENTICATION_AUTO_CONNECT = True
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"


# def inline(pem: str) -> str:
#     return "".join(
#         line.strip()
#         for line in pem.splitlines()
#         if "CERTIFICATE" not in line and "KEY" not in line
#     )


# # SAML keys (TODO: Validate certificates)
# SAML_PUBLIC_CERT = inline(env("SAML_PUBLIC_CERT", default=""))
# SAML_PRIVATE_KEY = inline(env("SAML_PRIVATE_KEY", default=""))

SOCIALACCOUNT_PROVIDERS = {
    "google": {
        "APP": {
@@ -63,20 +50,4 @@ SOCIALACCOUNT_PROVIDERS = {
            "read:org",
        ],
    },
    "saml": {
        "use_nameid_for_email": True,
        "sp": {
            "entity_id": "urn:prowler.com:sp",
        },
        "advanced": {
            # TODO: Validate certificates
            # "x509cert": SAML_PUBLIC_CERT,
            # "private_key": SAML_PRIVATE_KEY,
            # "authn_request_signed": True,
            # "want_message_signed": True,
            # "want_assertion_signed": True,
            "reject_idp_initiated_sso": False,
            "name_id_format": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
        },
    },
}

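For illustration only: what the commented-out inline() helper above would do if re-enabled. It drops the BEGIN/END marker lines and joins the base64 payload onto a single line, which is the shape single-line x509cert/private_key settings usually expect. The example PEM below is a made-up value.

def inline(pem: str) -> str:
    # copy of the commented-out helper above, for illustration
    return "".join(
        line.strip()
        for line in pem.splitlines()
        if "CERTIFICATE" not in line and "KEY" not in line
    )

example_pem = "-----BEGIN CERTIFICATE-----\nMIIBbz\nAbCd\n-----END CERTIFICATE-----"
assert inline(example_pem) == "MIIBbzAbCd"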
@@ -1,9 +1,8 @@
import logging
from datetime import datetime, timedelta, timezone
from unittest.mock import MagicMock, patch
from unittest.mock import patch

import pytest
from allauth.socialaccount.models import SocialLogin
from django.conf import settings
from django.db import connection as django_connection
from django.db import connections as django_connections
@@ -16,27 +15,20 @@ from tasks.jobs.backfill import backfill_resource_scan_summaries
from api.db_utils import rls_transaction
from api.models import (
    ComplianceOverview,
    ComplianceRequirementOverview,
    Finding,
    Integration,
    IntegrationProviderRelationship,
    Invitation,
    LighthouseConfiguration,
    Membership,
    Processor,
    Provider,
    ProviderGroup,
    ProviderSecret,
    Resource,
    ResourceTag,
    ResourceTagMapping,
    Role,
    SAMLConfiguration,
    SAMLDomainIndex,
    Scan,
    ScanSummary,
    StateChoices,
    StatusChoices,
    Task,
    User,
    UserRoleRelationship,
@@ -46,19 +38,12 @@ from api.v1.serializers import TokenSerializer
from prowler.lib.check.models import Severity
from prowler.lib.outputs.finding import Status

TODAY = str(datetime.today().date())
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
TEST_USER = "dev@prowler.com"
TEST_PASSWORD = "testing_psswd"


def today_after_n_days(n_days: int) -> str:
    return datetime.strftime(
        datetime.today().date() + timedelta(days=n_days), "%Y-%m-%d"
    )


@pytest.fixture(scope="module")
def enforce_test_user_db_connection(django_db_setup, django_db_blocker):
    """Ensure tests use the test user for database connections."""
@@ -390,27 +375,8 @@ def providers_fixture(tenants_fixture):
        tenant_id=tenant.id,
        scanner_args={"key1": "value1", "key2": {"key21": "value21"}},
    )
    provider6 = Provider.objects.create(
        provider="m365",
        uid="m365.test.com",
        alias="m365_testing",
        tenant_id=tenant.id,
    )

    return provider1, provider2, provider3, provider4, provider5, provider6


@pytest.fixture
def processor_fixture(tenants_fixture):
    tenant, *_ = tenants_fixture
    processor = Processor.objects.create(
        tenant_id=tenant.id,
        processor_type="mutelist",
        configuration="Mutelist:\n  Accounts:\n    *:\n      Checks:\n        iam_user_hardware_mfa_enabled:\n        "
        "  Regions:\n            - *\n          Resources:\n            - *",
    )

    return processor
    return provider1, provider2, provider3, provider4, provider5


@pytest.fixture
@@ -662,7 +628,6 @@ def findings_fixture(scans_fixture, resources_fixture):
        check_metadata={
            "CheckId": "test_check_id",
            "Description": "test description apple sauce",
            "servicename": "ec2",
        },
        first_seen_at="2024-01-02T00:00:00Z",
    )
@@ -689,7 +654,6 @@ def findings_fixture(scans_fixture, resources_fixture):
        check_metadata={
            "CheckId": "test_check_id",
            "Description": "test description orange juice",
            "servicename": "s3",
        },
        first_seen_at="2024-01-02T00:00:00Z",
        muted=True,
@@ -813,131 +777,6 @@ def compliance_overviews_fixture(scans_fixture, tenants_fixture):
    return compliance_overview1, compliance_overview2


@pytest.fixture
def compliance_requirements_overviews_fixture(scans_fixture, tenants_fixture):
    """Fixture for ComplianceRequirementOverview objects used by the new ComplianceOverviewViewSet."""
    tenant = tenants_fixture[0]
    scan1, scan2, scan3 = scans_fixture

    # Create ComplianceRequirementOverview objects for scan1
    requirement_overview1 = ComplianceRequirementOverview.objects.create(
        tenant=tenant,
        scan=scan1,
        compliance_id="aws_account_security_onboarding_aws",
        framework="AWS-Account-Security-Onboarding",
        version="1.0",
        description="Description for AWS Account Security Onboarding",
        region="eu-west-1",
        requirement_id="requirement1",
        requirement_status=StatusChoices.PASS,
        passed_checks=2,
        failed_checks=0,
        total_checks=2,
    )

    requirement_overview2 = ComplianceRequirementOverview.objects.create(
        tenant=tenant,
        scan=scan1,
        compliance_id="aws_account_security_onboarding_aws",
        framework="AWS-Account-Security-Onboarding",
        version="1.0",
        description="Description for AWS Account Security Onboarding",
        region="eu-west-1",
        requirement_id="requirement2",
        requirement_status=StatusChoices.PASS,
        passed_checks=2,
        failed_checks=0,
        total_checks=2,
    )

    requirement_overview3 = ComplianceRequirementOverview.objects.create(
        tenant=tenant,
        scan=scan1,
        compliance_id="aws_account_security_onboarding_aws",
        framework="AWS-Account-Security-Onboarding",
        version="1.0",
        description="Description for AWS Account Security Onboarding",
        region="eu-west-2",
        requirement_id="requirement1",
        requirement_status=StatusChoices.PASS,
        passed_checks=2,
        failed_checks=0,
        total_checks=2,
    )

    requirement_overview4 = ComplianceRequirementOverview.objects.create(
        tenant=tenant,
        scan=scan1,
        compliance_id="aws_account_security_onboarding_aws",
        framework="AWS-Account-Security-Onboarding",
        version="1.0",
        description="Description for AWS Account Security Onboarding",
        region="eu-west-2",
        requirement_id="requirement2",
        requirement_status=StatusChoices.FAIL,
        passed_checks=1,
        failed_checks=1,
        total_checks=2,
    )

    requirement_overview5 = ComplianceRequirementOverview.objects.create(
        tenant=tenant,
        scan=scan1,
        compliance_id="aws_account_security_onboarding_aws",
        framework="AWS-Account-Security-Onboarding",
        version="1.0",
        description="Description for AWS Account Security Onboarding (MANUAL)",
        region="eu-west-2",
        requirement_id="requirement3",
        requirement_status=StatusChoices.MANUAL,
        passed_checks=0,
        failed_checks=0,
        total_checks=0,
    )

    # Create a different compliance framework for testing
    requirement_overview6 = ComplianceRequirementOverview.objects.create(
        tenant=tenant,
        scan=scan1,
        compliance_id="cis_1.4_aws",
        framework="CIS-1.4-AWS",
        version="1.4",
        description="CIS AWS Foundations Benchmark v1.4.0",
        region="eu-west-1",
        requirement_id="cis_requirement1",
        requirement_status=StatusChoices.FAIL,
        passed_checks=0,
        failed_checks=3,
        total_checks=3,
    )

    # Create another compliance framework for testing MITRE ATT&CK
    requirement_overview7 = ComplianceRequirementOverview.objects.create(
        tenant=tenant,
        scan=scan1,
        compliance_id="mitre_attack_aws",
        framework="MITRE-ATTACK",
        version="1.0",
        description="MITRE ATT&CK",
        region="eu-west-1",
        requirement_id="mitre_requirement1",
        requirement_status=StatusChoices.FAIL,
        passed_checks=0,
        failed_checks=0,
        total_checks=0,
    )

    return (
        requirement_overview1,
        requirement_overview2,
        requirement_overview3,
        requirement_overview4,
        requirement_overview5,
        requirement_overview6,
        requirement_overview7,
    )

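A hypothetical usage sketch (not part of the diff) of the fixture above: a test asserting how the seeded requirement overviews split by region. The test name is an assumption.

def test_requirement_overviews_regions(compliance_requirements_overviews_fixture):
    overviews = compliance_requirements_overviews_fixture
    eu_west_1 = [o for o in overviews if o.region == "eu-west-1"]
    # requirement1, requirement2, cis_requirement1 and mitre_requirement1 are seeded in eu-west-1
    assert len(eu_west_1) == 4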
def get_api_tokens(
    api_client, user_email: str, user_password: str, tenant_id: str = None
) -> tuple[str, str]:
@@ -1065,7 +904,7 @@ def integrations_fixture(providers_fixture):
        enabled=True,
        connected=True,
        integration_type="amazon_s3",
        configuration={"key": "value1"},
        configuration={"key": "value"},
        credentials={"psswd": "1234"},
    )
    IntegrationProviderRelationship.objects.create(
@@ -1090,20 +929,6 @@ def backfill_scan_metadata_fixture(scans_fixture, findings_fixture):
    backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)


@pytest.fixture
def lighthouse_config_fixture(authenticated_client, tenants_fixture):
    return LighthouseConfiguration.objects.create(
        tenant_id=tenants_fixture[0].id,
        name="OpenAI",
        api_key_decoded="sk-test1234567890T3BlbkFJtest1234567890",
        model="gpt-4o",
        temperature=0,
        max_tokens=4000,
        business_context="Test business context",
        is_active=True,
    )


@pytest.fixture(scope="function")
def latest_scan_finding(authenticated_client, providers_fixture, resources_fixture):
    provider = providers_fixture[0]
@@ -1145,127 +970,6 @@ def latest_scan_finding(authenticated_client, providers_fixture, resources_fixtu
    return finding


@pytest.fixture(scope="function")
def latest_scan_resource(authenticated_client, providers_fixture):
    provider = providers_fixture[0]
    tenant_id = str(providers_fixture[0].tenant_id)
    scan = Scan.objects.create(
        name="latest completed scan for resource",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.COMPLETED,
        tenant_id=tenant_id,
    )
    resource = Resource.objects.create(
        tenant_id=tenant_id,
        provider=provider,
        uid="latest_resource_uid",
        name="Latest Resource",
        region="us-east-1",
        service="ec2",
        type="instance",
        metadata='{"test": "metadata"}',
        details='{"test": "details"}',
    )

    resource_tag = ResourceTag.objects.create(
        tenant_id=tenant_id,
        key="environment",
        value="test",
    )
    ResourceTagMapping.objects.create(
        tenant_id=tenant_id,
        resource=resource,
        tag=resource_tag,
    )

    finding = Finding.objects.create(
        tenant_id=tenant_id,
        uid="test_finding_uid_latest",
        scan=scan,
        delta="new",
        status=Status.FAIL,
        status_extended="test status extended ",
        impact=Severity.critical,
        impact_extended="test impact extended",
        severity=Severity.critical,
        raw_result={
            "status": Status.FAIL,
            "impact": Severity.critical,
            "severity": Severity.critical,
        },
        tags={"test": "latest"},
        check_id="test_check_id_latest",
        check_metadata={
            "CheckId": "test_check_id_latest",
            "Description": "test description latest",
        },
        first_seen_at="2024-01-02T00:00:00Z",
    )
    finding.add_resources([resource])

    backfill_resource_scan_summaries(tenant_id, str(scan.id))
    return resource


@pytest.fixture
def saml_setup(tenants_fixture):
    tenant_id = tenants_fixture[0].id
    domain = "prowler.com"

    SAMLDomainIndex.objects.create(email_domain=domain, tenant_id=tenant_id)

    metadata_xml = """<?xml version='1.0' encoding='UTF-8'?>
<md:EntityDescriptor entityID='TEST' xmlns:md='urn:oasis:names:tc:SAML:2.0:metadata'>
  <md:IDPSSODescriptor WantAuthnRequestsSigned='false' protocolSupportEnumeration='urn:oasis:names:tc:SAML:2.0:protocol'>
    <md:KeyDescriptor use='signing'>
      <ds:KeyInfo xmlns:ds='http://www.w3.org/2000/09/xmldsig#'>
        <ds:X509Data>
          <ds:X509Certificate>TEST</ds:X509Certificate>
        </ds:X509Data>
      </ds:KeyInfo>
    </md:KeyDescriptor>
    <md:NameIDFormat>urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress</md:NameIDFormat>
    <md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST' Location='https://TEST/sso/saml'/>
    <md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect' Location='https://TEST/sso/saml'/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>
"""
    SAMLConfiguration.objects.create(
        tenant_id=str(tenant_id),
        email_domain=domain,
        metadata_xml=metadata_xml,
    )

    return {
        "email": f"user@{domain}",
        "domain": domain,
        "tenant_id": tenant_id,
    }


@pytest.fixture
def saml_sociallogin(users_fixture):
    user = users_fixture[0]
    user.email = "samlsso@acme.com"
    extra_data = {
        "firstName": ["Test"],
        "lastName": ["User"],
        "organization": ["Prowler"],
        "userType": ["member"],
    }

    account = MagicMock()
    account.provider = "saml"
    account.extra_data = extra_data

    sociallogin = MagicMock(spec=SocialLogin)
    sociallogin.account = account
    sociallogin.user = user

    return sociallogin


def get_authorization_header(access_token: str) -> dict:
    return {"Authorization": f"Bearer {access_token}"}

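A hypothetical usage sketch combining the two helpers above; the endpoint path, the api_client fixture, and the response assertion are assumptions for illustration, not part of the diff.

def test_authenticated_request(api_client):
    # Obtain a (access, refresh) token pair for the seeded test user,
    # then attach the bearer header built by get_authorization_header().
    access, _refresh = get_api_tokens(api_client, TEST_USER, TEST_PASSWORD)
    response = api_client.get("/api/v1/providers", headers=get_authorization_header(access))
    assert response.status_code != NO_TENANT_HTTP_STATUS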
@@ -3,12 +3,6 @@
import os
import sys
import warnings

# Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684
warnings.filterwarnings(
    "ignore", category=UserWarning, module="dj_rest_auth.registration.serializers"
)


def main():

@@ -2,10 +2,10 @@ import json
from datetime import datetime, timedelta, timezone

from django_celery_beat.models import IntervalSchedule, PeriodicTask
from rest_framework_json_api.serializers import ValidationError
from tasks.tasks import perform_scheduled_scan_task

from api.db_utils import rls_transaction
from api.exceptions import ConflictException
from api.models import Provider, Scan, StateChoices


@@ -24,9 +24,15 @@ def schedule_provider_scan(provider_instance: Provider):
    if PeriodicTask.objects.filter(
        interval=schedule, name=task_name, task="scan-perform-scheduled"
    ).exists():
        raise ConflictException(
            detail="There is already a scheduled scan for this provider.",
            pointer="/data/attributes/provider_id",
        raise ValidationError(
            [
                {
                    "detail": "There is already a scheduled scan for this provider.",
                    "status": 400,
                    "source": {"pointer": "/data/attributes/provider_id"},
                    "code": "invalid",
                }
            ]
        )

    with rls_transaction(tenant_id):

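For reference, a sketch (an assumption about the rendered shape, not taken from this diff) of the JSON:API error document that rest_framework_json_api typically produces from the ValidationError payload above:

# {
#     "errors": [
#         {
#             "detail": "There is already a scheduled scan for this provider.",
#             "status": "400",
#             "source": {"pointer": "/data/attributes/provider_id"},
#             "code": "invalid"
#         }
#     ]
# }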
@@ -1,13 +1,9 @@
from datetime import datetime, timezone

import openai
from celery.utils.log import get_task_logger

from api.models import Integration, LighthouseConfiguration, Provider
from api.utils import (
    prowler_integration_connection_test,
    prowler_provider_connection_test,
)
from api.models import Provider
from api.utils import prowler_provider_connection_test

logger = get_task_logger(__name__)

@@ -43,78 +39,3 @@ def check_provider_connection(provider_id: str):

    connection_error = f"{connection_result.error}" if connection_result.error else None
    return {"connected": connection_result.is_connected, "error": connection_error}


def check_lighthouse_connection(lighthouse_config_id: str):
    """
    Business logic to check the connection status of a Lighthouse configuration.

    Args:
        lighthouse_config_id (str): The primary key of the LighthouseConfiguration instance to check.

    Returns:
        dict: A dictionary containing:
            - 'connected' (bool): Indicates whether the connection is successful.
            - 'error' (str or None): The error message if the connection failed, otherwise `None`.
            - 'available_models' (list): List of available models if connection is successful.

    Raises:
        Model.DoesNotExist: If the lighthouse configuration does not exist.
    """
    lighthouse_config = LighthouseConfiguration.objects.get(pk=lighthouse_config_id)

    if not lighthouse_config.api_key_decoded:
        lighthouse_config.is_active = False
        lighthouse_config.save()
        return {
            "connected": False,
            "error": "API key is invalid or missing.",
            "available_models": [],
        }

    try:
        client = openai.OpenAI(api_key=lighthouse_config.api_key_decoded)
        models = client.models.list()
        lighthouse_config.is_active = True
        lighthouse_config.save()
        return {
            "connected": True,
            "error": None,
            "available_models": [model.id for model in models.data],
        }
    except Exception as e:
        lighthouse_config.is_active = False
        lighthouse_config.save()
        return {"connected": False, "error": str(e), "available_models": []}


def check_integration_connection(integration_id: str):
    """
    Business logic to check the connection status of an integration.

    Args:
        integration_id (str): The primary key of the Integration instance to check.
    """
    integration = Integration.objects.filter(pk=integration_id, enabled=True).first()

    if not integration:
        logger.info(f"Integration {integration_id} is not enabled")
        return {"connected": False, "error": "Integration is not enabled"}

    try:
        result = prowler_integration_connection_test(integration)
    except Exception as e:
        logger.warning(
            f"Unexpected exception checking {integration.integration_type} integration connection: {str(e)}"
        )
        raise e

    # Update integration connection status
    integration.connected = result.is_connected
    integration.connection_last_checked_at = datetime.now(tz=timezone.utc)
    integration.save()

    return {
        "connected": result.is_connected,
        "error": str(result.error) if result.error else None,
    }

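A hypothetical usage sketch for the connection checkers above; the UUID is a placeholder and the call is shown outside of any Celery task context.

result = check_lighthouse_connection("00000000-0000-0000-0000-000000000000")  # placeholder pk
if result["connected"]:
    print(result["available_models"])  # list of model ids returned by the OpenAI API
else:
    print(result["error"])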
@@ -1,5 +1,4 @@
import os
import re
import zipfile

import boto3
@@ -8,12 +7,11 @@ from botocore.exceptions import ClientError, NoCredentialsError, ParamValidation
from celery.utils.log import get_task_logger
from django.conf import settings

from api.db_utils import rls_transaction
from api.models import Scan
from prowler.config.config import (
    csv_file_suffix,
    html_file_suffix,
    json_ocsf_file_suffix,
    output_file_timestamp,
)
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
    AWSWellArchitected,
@@ -21,7 +19,6 @@ from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected im
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_github import GithubCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
@@ -33,7 +30,6 @@ from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
    KubernetesISO27001,
)
from prowler.lib.outputs.compliance.iso27001.iso27001_m365 import M365ISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
@@ -93,10 +89,6 @@ COMPLIANCE_CLASS_MAP = {
    "m365": [
        (lambda name: name.startswith("cis_"), M365CIS),
        (lambda name: name == "prowler_threatscore_m365", ProwlerThreatScoreM365),
        (lambda name: name.startswith("iso27001_"), M365ISO27001),
    ],
    "github": [
        (lambda name: name.startswith("cis_"), GithubCIS),
    ],
}

@@ -172,7 +164,7 @@ def get_s3_client():
    return s3_client


def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str | None:
def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str:
    """
    Upload the specified ZIP file to an S3 bucket.
    If the S3 bucket environment variables are not configured,
@@ -189,7 +181,7 @@ def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str | None:
    """
    bucket = base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET
    if not bucket:
        return
        return None

    try:
        s3 = get_s3_client()
@@ -246,22 +238,15 @@ def _generate_output_directory(
        '/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
        '/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
    """
    # Sanitize the prowler provider name to ensure it is a valid directory name
    prowler_provider_sanitized = re.sub(r"[^\w\-]", "-", prowler_provider)

    with rls_transaction(tenant_id):
        started_at = Scan.objects.get(id=scan_id).started_at

    timestamp = started_at.strftime("%Y%m%d%H%M%S")
    path = (
        f"{output_directory}/{tenant_id}/{scan_id}/prowler-output-"
        f"{prowler_provider_sanitized}-{timestamp}"
        f"{prowler_provider}-{output_file_timestamp}"
    )
    os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)

    compliance_path = (
        f"{output_directory}/{tenant_id}/{scan_id}/compliance/prowler-output-"
        f"{prowler_provider_sanitized}-{timestamp}"
        f"{prowler_provider}-{output_file_timestamp}"
    )
    os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)

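For illustration: the sanitization step above maps any character outside [A-Za-z0-9_-] to "-", keeping the provider segment of the output path filesystem-safe. The sample inputs are illustrative.

import re

assert re.sub(r"[^\w\-]", "-", "aws") == "aws"
assert re.sub(r"[^\w\-]", "-", "m365.test.com") == "m365-test-com"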
@@ -1,156 +0,0 @@
import os
from glob import glob

from celery.utils.log import get_task_logger

from api.db_utils import rls_transaction
from api.models import Integration
from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.common.models import Connection

logger = get_task_logger(__name__)


def get_s3_client_from_integration(
    integration: Integration,
) -> tuple[bool, S3 | Connection]:
    """
    Create and return a boto3 S3 client using AWS credentials from an integration.

    Args:
        integration (Integration): The integration to get the S3 client from.

    Returns:
        tuple[bool, S3 | Connection]: A tuple containing a boolean indicating if the connection was successful and the S3 client or connection object.
    """
    s3 = S3(
        **integration.credentials,
        bucket_name=integration.configuration["bucket_name"],
        output_directory=integration.configuration["output_directory"],
    )

    connection = s3.test_connection(
        **integration.credentials,
        bucket_name=integration.configuration["bucket_name"],
    )

    if connection.is_connected:
        return True, s3

    return False, connection


def upload_s3_integration(
    tenant_id: str, provider_id: str, output_directory: str
) -> bool:
    """
    Upload the specified output files to an S3 bucket from an integration.
    Reconstructs output objects from files in the output directory instead of using serialized data.

    Args:
        tenant_id (str): The tenant identifier, used as part of the S3 key prefix.
        provider_id (str): The provider identifier, used as part of the S3 key prefix.
        output_directory (str): Path to the directory containing output files.

    Returns:
        bool: True if all integrations were executed, False otherwise.

    Raises:
        botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
    """
    logger.info(f"Processing S3 integrations for provider {provider_id}")

    try:
        with rls_transaction(tenant_id):
            integrations = list(
                Integration.objects.filter(
                    integrationproviderrelationship__provider_id=provider_id,
                    integration_type=Integration.IntegrationChoices.AMAZON_S3,
                    enabled=True,
                )
            )

        if not integrations:
            logger.error(f"No S3 integrations found for provider {provider_id}")
            return False

        integration_executions = 0
        for integration in integrations:
            try:
                connected, s3 = get_s3_client_from_integration(integration)
            except Exception as e:
                logger.info(
                    f"S3 connection failed for integration {integration.id}: {e}"
                )
                integration.connected = False
                integration.save()
                continue

            if connected:
                try:
                    # Reconstruct generated_outputs from files in output directory
                    # This approach scans the output directory for files and creates the appropriate
                    # output objects based on file extensions and naming patterns.
                    generated_outputs = {"regular": [], "compliance": []}

                    # Find and recreate regular outputs (CSV, HTML, OCSF)
                    output_file_patterns = {
                        ".csv": CSV,
                        ".html": HTML,
                        ".ocsf.json": OCSF,
                        ".asff.json": ASFF,
                    }

                    base_dir = os.path.dirname(output_directory)
                    for extension, output_class in output_file_patterns.items():
                        pattern = f"{output_directory}*{extension}"
                        for file_path in glob(pattern):
                            if os.path.exists(file_path):
                                output = output_class(findings=[], file_path=file_path)
                                output.create_file_descriptor(file_path)
                                generated_outputs["regular"].append(output)

                    # Find and recreate compliance outputs
                    compliance_pattern = os.path.join(base_dir, "compliance", "*.csv")
                    for file_path in glob(compliance_pattern):
                        if os.path.exists(file_path):
                            output = GenericCompliance(
                                findings=[],
                                compliance=None,
                                file_path=file_path,
                                file_extension=".csv",
                            )
                            output.create_file_descriptor(file_path)
                            generated_outputs["compliance"].append(output)

                    # Use send_to_bucket with recreated generated_outputs objects
                    s3.send_to_bucket(generated_outputs)
                except Exception as e:
                    logger.error(
                        f"S3 upload failed for integration {integration.id}: {e}"
                    )
                    continue
                integration_executions += 1
            else:
                integration.connected = False
                integration.save()
                logger.error(
                    f"S3 upload failed, connection failed for integration {integration.id}: {s3.error}"
                )

        result = integration_executions == len(integrations)
        if result:
            logger.info(
                f"All the S3 integrations completed successfully for provider {provider_id}"
            )
        else:
            logger.info(f"Some S3 integrations failed for provider {provider_id}")
        return result
    except Exception as e:
        logger.error(f"S3 integrations failed for provider {provider_id}: {str(e)}")
        return False
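For illustration of the removed reconstruction logic above, under an assumed output prefix (the path below is hypothetical):

# Given output_directory = "/tmp/prowler-output-aws-20230215123456",
# the pattern f"{output_directory}*{extension}" expands to e.g.
# "/tmp/prowler-output-aws-20230215123456*.csv", so glob() collects the CSV,
# HTML, OCSF and ASFF artifacts sharing that prefix; each match is wrapped in
# its output class before s3.send_to_bucket(generated_outputs) runs.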
Some files were not shown because too many files have changed in this diff.