Mirror of https://github.com/prowler-cloud/prowler.git, synced 2026-01-25 02:08:11 +00:00.

Compare commits (1523 commits)

.env (new file, 135 lines)
@@ -0,0 +1,135 @@
#### Important Note ####
# This file is used to store environment variables for the Prowler App.
# For production, it is recommended to use a secure method to store these variables and change the default secret keys.

#### Prowler UI Configuration ####
PROWLER_UI_VERSION="stable"
AUTH_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true
UI_PORT=3000
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="

#### Prowler API Configuration ####
PROWLER_API_VERSION="stable"
# PostgreSQL settings
# If running Django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=postgres-db
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler_admin
POSTGRES_ADMIN_PASSWORD=postgres
POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db

# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0

# API scan settings

# The path to the directory where scan output should be stored
DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"

# The maximum number of findings to process in a single batch
DJANGO_FINDINGS_BATCH_SIZE=1000

# The AWS access key to be used when uploading scan output to an S3 bucket
# If left empty, default AWS credentials resolution behavior will be used
DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID=""

# The AWS secret key to be used when uploading scan output to an S3 bucket
DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY=""

# An optional AWS session token
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN=""

# The AWS region where your S3 bucket is located (e.g., "us-east-1")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION=""

# The name of the S3 bucket where scan output should be stored
DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET=""

# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8080
DJANGO_DEBUG=False
DJANGO_SETTINGS_MODULE=config.django.production
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=human_readable
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
# Defaults to the maximum available based on CPU cores if not set.
DJANGO_WORKERS=4
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
# Token lifetime is in minutes
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_MANAGE_DB_PARTITIONS=True
# openssl genrsa -out private.pem 2048
DJANGO_TOKEN_SIGNING_KEY="-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDs4e+kt7SnUJek
6V5r9zMGzXCoU5qnChfPiqu+BgANyawz+MyVZPs6RCRfeo6tlCknPQtOziyXYM2I
7X+qckmuzsjqp8+u+o1mw3VvUuJew5k2SQLPYwsiTzuFNVJEOgRo3hywGiGwS2iv
/5nh2QAl7fq2qLqZEXQa5+/xJlQggS1CYxOJgggvLyra50QZlBvPve/AxKJ/EV/Q
irWTZU5lLNI8sH2iZR05vQeBsxZ0dCnGMT+vGl+cGkqrvzQzKsYbDmabMcfTYhYi
78fpv6A4uharJFHayypYBjE39PwhMyyeycrNXlpm1jpq+03HgmDuDMHydk1tNwuT
nEC7m7iNAgMBAAECggEAA2m48nJcJbn9SVi8bclMwKkWmbJErOnyEGEy2sTK3Of+
NWx9BB0FmqAPNxn0ss8K7cANKOhDD7ZLF9E2MO4/HgfoMKtUzHRbM7MWvtEepldi
nnvcUMEgULD8Dk4HnqiIVjt3BdmGiTv46OpBnRWrkSBV56pUL+7msZmMZTjUZvh2
ZWv0+I3gtDIjo2Zo/FiwDV7CfwRjJarRpYUj/0YyuSA4FuOUYl41WAX1I301FKMH
xo3jiAYi1s7IneJ16OtPpOA34Wg5F6ebm/UO0uNe+iD4kCXKaZmxYQPh5tfB0Qa3
qj1T7GNpFNyvtG7VVdauhkb8iu8X/wl6PCwbg0RCKQKBgQD9HfpnpH0lDlHMRw9K
X7Vby/1fSYy1BQtlXFEIPTN/btJ/asGxLmAVwJ2HAPXWlrfSjVAH7CtVmzN7v8oj
HeIHfeSgoWEu1syvnv2AMaYSo03UjFFlfc/GUxF7DUScRIhcJUPCP8jkAROz9nFv
DByNjUL17Q9r43DmDiRsy0IFqQKBgQDvlJ9Uhl+Sp7gRgKYwa/IG0+I4AduAM+Gz
Dxbm52QrMGMTjaJFLmLHBUZ/ot+pge7tZZGws8YR8ufpyMJbMqPjxhIvRRa/p1Tf
E3TQPW93FMsHUvxAgY3MV5MzXFPhlNAKb+akP/RcXUhetGAuZKLubtDCWa55ZQuL
wj2OS+niRQKBgE7K8zUqNi6/22S8xhy/2GPgB1qPObbsABUofK0U6CAGLo6te+gc
6Jo84IyzFtQbDNQFW2Fr+j1m18rw9AqkdcUhQndiZS9AfG07D+zFB86LeWHt4DS4
ymIRX8Kvaak/iDcu/n3Mf0vCrhB6aetImObTj4GgrwlFvtJOmrYnO8EpAoGAIXXP
Xt25gWD9OyyNiVu6HKwA/zN7NYeJcRmdaDhO7B1A6R0x2Zml4AfjlbXoqOLlvLAf
zd79vcoAC82nH1eOPiSOq51plPDI0LMF8IN0CtyTkn1Lj7LIXA6rF1RAvtOqzppc
SvpHpZK9pcRpXnFdtBE0BMDDtl6fYzCIqlP94UUCgYEAnhXbAQMF7LQifEm34Dx8
BizRMOKcqJGPvbO2+Iyt50O5X6onU2ITzSV1QHtOvAazu+B1aG9pEuBFDQ+ASxEu
L9ruJElkOkb/o45TSF6KCsHd55ReTZ8AqnRjf5R+lyzPqTZCXXb8KTcRvWT4zQa3
VxyT2PnaSqEcexWUy4+UXoQ=
-----END PRIVATE KEY-----"
# openssl rsa -in private.pem -pubout -out public.pem
DJANGO_TOKEN_VERIFYING_KEY="-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7OHvpLe0p1CXpOlea/cz
Bs1wqFOapwoXz4qrvgYADcmsM/jMlWT7OkQkX3qOrZQpJz0LTs4sl2DNiO1/qnJJ
rs7I6qfPrvqNZsN1b1LiXsOZNkkCz2MLIk87hTVSRDoEaN4csBohsEtor/+Z4dkA
Je36tqi6mRF0Gufv8SZUIIEtQmMTiYIILy8q2udEGZQbz73vwMSifxFf0Iq1k2VO
ZSzSPLB9omUdOb0HgbMWdHQpxjE/rxpfnBpKq780MyrGGw5mmzHH02IWIu/H6b+g
OLoWqyRR2ssqWAYxN/T8ITMsnsnKzV5aZtY6avtNx4Jg7gzB8nZNbTcLk5xAu5u4
jQIDAQAB
-----END PUBLIC KEY-----"
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=

# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.5.1

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""

SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""

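This .env ships with published default secrets, and its own comments name the openssl commands for rotating them. A minimal rotation sketch, assuming openssl is installed and each generated value is pasted back into the variable named in the comment:

```bash
# Regenerate the published default secrets before any real deployment.
# Each command is taken from the comments in .env above.

openssl rand -base64 32   # new value for AUTH_SECRET
openssl rand -base64 32   # new value for DJANGO_SECRETS_ENCRYPTION_KEY

# RSA key pair for signing and verifying API tokens
openssl genrsa -out private.pem 2048                  # DJANGO_TOKEN_SIGNING_KEY
openssl rsa -in private.pem -pubout -out public.pem   # DJANGO_TOKEN_VERIFYING_KEY
```
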
.github/CODEOWNERS (9 changes)
@@ -1,5 +1,6 @@
* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation

# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
/* @prowler-cloud/sdk
/.github/ @prowler-cloud/sdk
prowler @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
tests @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
api @prowler-cloud/api
ui @prowler-cloud/ui

.github/codeql/api-codeql-config.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
name: "API - CodeQL Config"
paths:
  - "api/"

.github/codeql/sdk-codeql-config.yml (new file, 4 lines)
@@ -0,0 +1,4 @@
name: "SDK - CodeQL Config"
paths-ignore:
  - "api/"
  - "ui/"

.github/codeql/ui-codeql-config.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
name: "UI - CodeQL Config"
paths:
  - "ui/"

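These configs only scope a scan once a workflow hands them to the CodeQL init action; the API CodeQL workflow further down this page does exactly that:

```yaml
# Wiring a per-component config into CodeQL (taken from the API
# workflow shown later in this diff).
- name: Initialize CodeQL
  uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
  with:
    languages: ${{ matrix.language }}
    config-file: ./.github/codeql/api-codeql-config.yml
```
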
.github/dependabot.yml (114 changes)
@@ -5,42 +5,116 @@

version: 2
updates:
  # v5
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 10
      interval: "monthly"
    open-pull-requests-limit: 25
    target-branch: master
    labels:
      - "dependencies"
      - "pip"

  # Dependabot Updates are temporary disabled - 2025/03/19
  # - package-ecosystem: "pip"
  # directory: "/api"
  # schedule:
  # interval: "daily"
  # open-pull-requests-limit: 10
  # target-branch: master
  # labels:
  # - "dependencies"
  # - "pip"
  # - "component/api"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 10
      interval: "monthly"
    open-pull-requests-limit: 25
    target-branch: master
    labels:
      - "dependencies"
      - "github_actions"

  - package-ecosystem: "pip"
  # Dependabot Updates are temporary disabled - 2025/03/19
  # - package-ecosystem: "npm"
  # directory: "/ui"
  # schedule:
  # interval: "daily"
  # open-pull-requests-limit: 10
  # target-branch: master
  # labels:
  # - "dependencies"
  # - "npm"
  # - "component/ui"

  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 10
    target-branch: v3
      interval: "monthly"
    open-pull-requests-limit: 25
    target-branch: master
    labels:
      - "dependencies"
      - "pip"
      - "v3"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 10
    target-branch: v3
    labels:
      - "dependencies"
      - "github_actions"
      - "v3"
      - "docker"

  # Dependabot Updates are temporary disabled - 2025/04/15
  # v4.6
  # - package-ecosystem: "pip"
  # directory: "/"
  # schedule:
  # interval: "weekly"
  # open-pull-requests-limit: 10
  # target-branch: v4.6
  # labels:
  # - "dependencies"
  # - "pip"
  # - "v4"

  # - package-ecosystem: "github-actions"
  # directory: "/"
  # schedule:
  # interval: "weekly"
  # open-pull-requests-limit: 10
  # target-branch: v4.6
  # labels:
  # - "dependencies"
  # - "github_actions"
  # - "v4"

  # - package-ecosystem: "docker"
  # directory: "/"
  # schedule:
  # interval: "weekly"
  # open-pull-requests-limit: 10
  # target-branch: v4.6
  # labels:
  # - "dependencies"
  # - "docker"
  # - "v4"

  # Dependabot Updates are temporary disabled - 2025/03/19
  # v3
  # - package-ecosystem: "pip"
  # directory: "/"
  # schedule:
  # interval: "monthly"
  # open-pull-requests-limit: 10
  # target-branch: v3
  # labels:
  # - "dependencies"
  # - "pip"
  # - "v3"

  # - package-ecosystem: "github-actions"
  # directory: "/"
  # schedule:
  # interval: "monthly"
  # open-pull-requests-limit: 10
  # target-branch: v3
  # labels:
  # - "dependencies"
  # - "github_actions"
  # - "v3"

.github/labeler.yml (18 changes)
@@ -22,6 +22,11 @@ provider/kubernetes:
      - any-glob-to-any-file: "prowler/providers/kubernetes/**"
      - any-glob-to-any-file: "tests/providers/kubernetes/**"

provider/github:
  - changed-files:
      - any-glob-to-any-file: "prowler/providers/github/**"
      - any-glob-to-any-file: "tests/providers/github/**"

github_actions:
  - changed-files:
      - any-glob-to-any-file: ".github/workflows/*"

@@ -80,9 +85,20 @@ output/csv:
      - any-glob-to-any-file: "prowler/lib/outputs/csv/**"
      - any-glob-to-any-file: "tests/lib/outputs/csv/**"

component/api:
  - changed-files:
      - any-glob-to-any-file: "api/**"

component/ui:
  - changed-files:
      - any-glob-to-any-file: "ui/**"

compliance:
  - changed-files:
      - any-glob-to-any-file: "prowler/compliance/**"
      - any-glob-to-any-file: "prowler/lib/outputs/compliance/**"
      - any-glob-to-any-file: "tests/lib/outputs/compliance/**"
      - any-glob-to-any-file: "prowler/compliance/**"

review-django-migrations:
  - changed-files:
      - any-glob-to-any-file: "api/src/backend/api/migrations/**"

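These glob rules only map labels to paths; the workflow that applies them is not part of this diff. A minimal sketch of a consumer, assuming the standard actions/labeler action and its default configuration path:

```yaml
# Hypothetical labeler workflow (not included in this diff): reads
# .github/labeler.yml and applies matching labels to pull requests.
name: Labeler
on: pull_request_target

jobs:
  label:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    steps:
      - uses: actions/labeler@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
```
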
.github/pull_request_template.md (6 changes)
@@ -15,6 +15,12 @@ Please include a summary of the change and which issue is fixed. List any depend
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
- [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)

#### API
- [ ] Verify if API specs need to be regenerated.
- [ ] Check if version updates are required (e.g., specs, Poetry, etc.).
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/api/CHANGELOG.md), if applicable.

### License

.github/workflows/api-build-lint-push-containers.yml (new file, 114 lines)
@@ -0,0 +1,114 @@
name: API - Build and Push containers

on:
  push:
    branches:
      - "master"
    paths:
      - "api/**"
      - ".github/workflows/api-build-lint-push-containers.yml"

  # Uncomment the code below to test this action on PRs
  # pull_request:
  #   branches:
  #     - "master"
  #   paths:
  #     - "api/**"
  #     - ".github/workflows/api-build-lint-push-containers.yml"

  release:
    types: [published]

env:
  # Tags
  LATEST_TAG: latest
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  STABLE_TAG: stable

  WORKING_DIRECTORY: ./api

  # Container Registries
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api

jobs:
  repository-check:
    name: Repository check
    runs-on: ubuntu-latest
    outputs:
      is_repo: ${{ steps.repository_check.outputs.is_repo }}
    steps:
      - name: Repository check
        id: repository_check
        working-directory: /tmp
        run: |
          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
          then
            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
          else
            echo "This action only runs for prowler-cloud/prowler"
            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
          fi

  # Build Prowler OSS container
  container-build-push:
    needs: repository-check
    if: needs.repository-check.outputs.is_repo == 'true'
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ env.WORKING_DIRECTORY }}

    steps:
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Set short git commit SHA
        id: vars
        run: |
          shortSha=$(git rev-parse --short ${{ github.sha }})
          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

      - name: Login to DockerHub
        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          # Set push: false for testing
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Trigger deployment
        if: github.event_name == 'push'
        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
        with:
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          repository: ${{ secrets.CLOUD_DISPATCH }}
          event-type: prowler-api-deploy
          client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'

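The build step can be approximated outside CI for local verification. A sketch assuming Docker with Buildx is installed; the tag mirrors the PROWLERCLOUD_DOCKERHUB_* values from the workflow's env block:

```bash
# Local equivalent of the workflow's "latest" build, without pushing.
SHORT_SHA=$(git rev-parse --short HEAD)
docker buildx build ./api \
  --tag "prowlercloud/prowler-api:latest" \
  --tag "prowlercloud/prowler-api:${SHORT_SHA}" \
  --load   # load the image into the local Docker daemon instead of pushing
```
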
@@ -9,14 +9,21 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
name: API - CodeQL

on:
  push:
    branches: [ "master", "v3", "v4.*" ]
    branches:
      - "master"
      - "v5.*"
    paths:
      - "api/**"
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ "master", "v3", "v4.*" ]
    branches:
      - "master"
      - "v5.*"
    paths:
      - "api/**"
  schedule:
    - cron: '00 12 * * *'

@@ -37,21 +44,16 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

          # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality
          config-file: ./.github/codeql/api-codeql-config.yml

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
        with:
          category: "/language:${{matrix.language}}"

203
.github/workflows/api-pull-request.yml
vendored
Normal file
203
.github/workflows/api-pull-request.yml
vendored
Normal file
@@ -0,0 +1,203 @@
|
||||
name: API - Pull Request
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
paths:
|
||||
- ".github/workflows/api-pull-request.yml"
|
||||
- "api/**"
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
paths:
|
||||
- "api/**"
|
||||
|
||||
env:
|
||||
POSTGRES_HOST: localhost
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_ADMIN_USER: prowler
|
||||
POSTGRES_ADMIN_PASSWORD: S3cret
|
||||
POSTGRES_USER: prowler_user
|
||||
POSTGRES_PASSWORD: prowler
|
||||
POSTGRES_DB: postgres-db
|
||||
VALKEY_HOST: localhost
|
||||
VALKEY_PORT: 6379
|
||||
VALKEY_DB: 0
|
||||
API_WORKING_DIR: ./api
|
||||
IMAGE_NAME: prowler-api
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.12"]
|
||||
|
||||
# Service containers to run with `test`
|
||||
services:
|
||||
# Label used to access the service container
|
||||
postgres:
|
||||
image: postgres
|
||||
env:
|
||||
POSTGRES_HOST: ${{ env.POSTGRES_HOST }}
|
||||
POSTGRES_PORT: ${{ env.POSTGRES_PORT }}
|
||||
POSTGRES_USER: ${{ env.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
|
||||
POSTGRES_DB: ${{ env.POSTGRES_DB }}
|
||||
# Set health checks to wait until postgres has started
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine3.19
|
||||
env:
|
||||
VALKEY_HOST: ${{ env.VALKEY_HOST }}
|
||||
VALKEY_PORT: ${{ env.VALKEY_PORT }}
|
||||
VALKEY_DB: ${{ env.VALKEY_DB }}
|
||||
# Set health checks to wait until postgres has started
|
||||
ports:
|
||||
- 6379:6379
|
||||
options: >-
|
||||
--health-cmd "valkey-cli ping"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: |
|
||||
api/.github/**
|
||||
api/docs/**
|
||||
api/permissions/**
|
||||
api/README.md
|
||||
api/mkdocs.yml
|
||||
|
||||
- name: Replace @master with current branch in pyproject.toml
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
|
||||
echo "Using branch: $BRANCH_NAME"
|
||||
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
|
||||
|
||||
- name: Install poetry
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Update poetry.lock after the branch name change
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry lock
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
|
||||
&& chmod +x /tmp/hadolint
      - name: Poetry check
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry check --lock

      - name: Lint with ruff
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run ruff check . --exclude contrib

      - name: Check Format with ruff
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run ruff format --check . --exclude contrib

      - name: Lint with pylint
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/

      - name: Bandit
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .

      - name: Safety
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run safety check --ignore 70612,66963,74429

      - name: Vulture
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .

      - name: Hadolint
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          /tmp/hadolint Dockerfile --ignore=DL3013

      - name: Test with pytest
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pytest --cov=./src/backend --cov-report=xml src/backend

      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          flags: api

  test-container-build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
      - name: Build Container
        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: ${{ env.API_WORKING_DIR }}
          push: false
          tags: ${{ env.IMAGE_NAME }}:latest
          outputs: type=docker
          cache-from: type=gha
          cache-to: type=gha,mode=max
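The `test-container-build` job can be approximated locally before opening a PR. A minimal sketch, assuming the image context behind `API_WORKING_DIR` is `./api` and using an arbitrary local tag:

```bash
# Build the API container into the local Docker image store, without pushing.
docker buildx build --load -t prowler-api:latest ./api
```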
.github/workflows/backport.yml (vendored, 39 lines changed)

@@ -1,42 +1,47 @@
-name: Automatic Backport
+name: Prowler - Automatic Backport

on:
  pull_request_target:
    branches: ['master']
    types: ['labeled', 'closed']

+env:
+  # The prefix of the label that triggers the backport must not contain the branch name
+  # so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
+  BACKPORT_LABEL_PREFIX: backport-to-
+  BACKPORT_LABEL_IGNORE: was-backported

jobs:
  backport:
    name: Backport PR
-    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
+    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      pull-requests: write
      contents: write
    steps:
      # Workaround not to fail the workflow if the PR does not need a backport
      # https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
-      - name: Check for backport labels
-        id: check_labels
-        run: |-
-          labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
-          echo "$labels"
-          matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
-          echo "matched=$matched"
-          echo "matched=$matched" >> $GITHUB_OUTPUT
+      - name: Check labels
+        id: preview_label_check
+        uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
+        with:
+          allow_failure: true
+          prefix_mode: true
+          any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
+          none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
+          repo_token: ${{ secrets.GITHUB_TOKEN }}

      - name: Backport Action
-        if: fromJSON(steps.check_labels.outputs.matched) > 0
-        uses: sorenlouv/backport-github-action@v9.5.1
+        if: steps.preview_label_check.outputs.label_check == 'success'
+        uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
        with:
          github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          auto_backport_label_prefix: backport-to-
+          auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}

      - name: Info log
-        if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
+        if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
        run: cat ~/.backport/backport.info.log

      - name: Debug log
-        if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
+        if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
        run: cat ~/.backport/backport.debug.log
@@ -1,4 +1,4 @@
-name: Pull Request Documentation Link
+name: Prowler - Pull Request Documentation Link

on:
  pull_request:
@@ -17,7 +17,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Leave PR comment with the Prowler Documentation URI
-        uses: peter-evans/create-or-update-comment@v4
+        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
        with:
          issue-number: ${{ env.PR_NUMBER }}
          body: |
.github/workflows/conventional-commit.yml (vendored, new file, 23 lines)

@@ -0,0 +1,23 @@
name: Prowler - Conventional Commit

on:
  pull_request:
    types:
      - "opened"
      - "edited"
      - "synchronize"
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"

jobs:
  conventional-commit-check:
    runs-on: ubuntu-latest
    steps:
      - name: conventional-commit-check
        id: conventional-commit-check
        uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
        with:
          pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
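The `pr-title-regex` enforces a `type(scope): subject` title shape. A candidate title can be tried against the same pattern locally; a sketch using GNU grep's PCRE mode, where `check_title` is an illustrative helper and not part of the workflow:

```bash
# Returns success when a PR title matches the workflow's pattern.
check_title() {
  echo "$1" | grep -Pq '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
}

check_title "feat(api): add findings endpoint" && echo "accepted"
check_title "add findings endpoint" || echo "rejected: missing type prefix"
```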
.github/workflows/find-secrets.yml (vendored, 8 lines changed)

@@ -1,4 +1,4 @@
-name: find-secrets
+name: Prowler - Find secrets

on: pull_request

@@ -7,13 +7,13 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
      - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@v3.84.1
+        uses: trufflesecurity/trufflehog@b06f6d72a3791308bb7ba59c2b8cb7a083bd17e4 # v3.88.26
        with:
          path: ./
          base: ${{ github.event.repository.default_branch }}
          head: HEAD
-          extra_args: --only-verified
+          extra_args: --only-verified
.github/workflows/labeler.yml (vendored, 4 lines changed)

@@ -1,4 +1,4 @@
-name: "Pull Request Labeler"
+name: Prowler - PR Labeler

on:
  pull_request_target:
@@ -14,4 +14,4 @@ jobs:
    pull-requests: write
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/labeler@v5
+      - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
.github/workflows/pull-request-merged.yml (vendored, new file, 34 lines)

@@ -0,0 +1,34 @@
name: Prowler - Merged Pull Request

on:
  pull_request_target:
    branches: ['master']
    types: ['closed']

jobs:
  trigger-cloud-pull-request:
    name: Trigger Cloud Pull Request
    if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Set short git commit SHA
        id: vars
        run: |
          shortSha=$(git rev-parse --short ${{ github.sha }})
          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

      - name: Trigger pull request
        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
        with:
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          repository: ${{ secrets.CLOUD_DISPATCH }}
          event-type: prowler-pull-request-merged
          client-payload: '{
            "PROWLER_COMMIT_SHA": "${{ github.sha }}",
            "PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
            "PROWLER_PR_TITLE": "${{ github.event.pull_request.title }}",
            "PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
            "PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }}
          }'
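The `repository-dispatch` action is a thin wrapper around GitHub's repository dispatch REST endpoint, so the same event can be fired by hand when debugging the receiving side. A sketch, where `OWNER/REPO`, the token, and the sample SHA are placeholders:

```bash
# Manually emit the same repository_dispatch event via the REST API.
curl --silent -X POST \
  -H "Authorization: Bearer ${GITHUB_TOKEN}" \
  -H "Accept: application/vnd.github+json" \
  "https://api.github.com/repos/OWNER/REPO/dispatches" \
  -d '{"event_type": "prowler-pull-request-merged",
       "client_payload": {"PROWLER_COMMIT_SHA": "0123abc"}}'
```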
.github/workflows/pull-request.yml (vendored, deleted, 96 lines)

@@ -1,96 +0,0 @@
name: pr-lint-test

on:
  push:
    branches:
      - "master"
      - "v3"
      - "v4.*"
  pull_request:
    branches:
      - "master"
      - "v3"
      - "v4.*"
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.9", "3.10", "3.11", "3.12"]

    steps:
      - uses: actions/checkout@v4
      - name: Test if changes are in not ignored paths
        id: are-non-ignored-files-changed
        uses: tj-actions/changed-files@v45
        with:
          files: ./**
          files_ignore: |
            .github/**
            README.md
            docs/**
            permissions/**
            mkdocs.yml
            .backportrc.json
      - name: Install poetry
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          python -m pip install --upgrade pip
          pipx install poetry
      - name: Set up Python ${{ matrix.python-version }}
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"
      - name: Install dependencies
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry install
          poetry run pip list
          VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
            grep '"tag_name":' | \
            sed -E 's/.*"v([^"]+)".*/\1/' \
          ) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
          && chmod +x /tmp/hadolint
      - name: Poetry check
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry lock --check
      - name: Lint with flake8
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
      - name: Checking format with black
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run black --check .
      - name: Lint with pylint
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
      - name: Bandit
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
      - name: Safety
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run safety check --ignore 70612
      - name: Vulture
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run vulture --exclude "contrib" --min-confidence 100 .
      - name: Hadolint
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          /tmp/hadolint Dockerfile --ignore=DL3013
      - name: Test with pytest
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: codecov/codecov-action@v5
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -1,14 +1,20 @@
-name: build-lint-push-containers
+name: SDK - Build and Push containers

on:
  push:
    branches:
+      # For `v3-latest`
      - "v3"
+      # For `v4-latest`
+      - "v4.6"
+      # For `latest`
      - "master"
    paths-ignore:
      - ".github/**"
      - "README.md"
      - "docs/**"
+      - "ui/**"
+      - "api/**"

  release:
    types: [published]
@@ -36,6 +42,10 @@ env:
  # Python configuration
  PYTHON_VERSION: 3.12

+  # Container Registries
+  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
+  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler

jobs:
  # Build Prowler OSS container
  container-build-push:
@@ -49,16 +59,16 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Setup Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      - name: Install Poetry
        run: |
-          pipx install poetry
+          pipx install poetry==2.*
          pipx inject poetry poetry-bumpversion

      - name: Get Prowler version
@@ -79,7 +89,13 @@ jobs:
            echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
            ;;

          4)
            echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
            echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
            ;;

+          5)
+            echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
+            echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
+            ;;
@@ -92,13 +108,13 @@ jobs:
          esac

      - name: Login to DockerHub
-        uses: docker/login-action@v3
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Public ECR
-        uses: docker/login-action@v3
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: public.ecr.aws
          username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
@@ -107,23 +123,24 @@ jobs:
          AWS_REGION: ${{ env.AWS_REGION }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

      - name: Build and push container image (latest)
        if: github.event_name == 'push'
-        uses: docker/build-push-action@v6
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
+            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
          file: ${{ env.DOCKERFILE_PATH }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
-        uses: docker/build-push-action@v6
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          # Use local context to get changes
          # https://github.com/docker/build-push-action#path-context
@@ -134,6 +151,8 @@ jobs:
            ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.PROWLER_VERSION }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
+            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.PROWLER_VERSION }}
+            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
          file: ${{ env.DOCKERFILE_PATH }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
.github/workflows/sdk-bump-version.yml (vendored, new file, 145 lines)

@@ -0,0 +1,145 @@
name: SDK - Bump Version

on:
  release:
    types: [published]

env:
  PROWLER_VERSION: ${{ github.event.release.tag_name }}
  BASE_BRANCH: master

jobs:
  bump-version:
    name: Bump Version
    if: github.repository == 'prowler-cloud/prowler'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Get Prowler version
        shell: bash
        run: |
          if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            MAJOR_VERSION=${BASH_REMATCH[1]}
            MINOR_VERSION=${BASH_REMATCH[2]}
            FIX_VERSION=${BASH_REMATCH[3]}

            # Export version components to GitHub environment
            echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
            echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
            echo "FIX_VERSION=${FIX_VERSION}" >> "${GITHUB_ENV}"

            if (( MAJOR_VERSION == 5 )); then
              if (( FIX_VERSION == 0 )); then
                echo "Minor Release: $PROWLER_VERSION"

                # Set up next minor version for master
                BUMP_VERSION_TO=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).${FIX_VERSION}
                echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"

                TARGET_BRANCH=${BASE_BRANCH}
                echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"

                # Set up patch version for version branch
                PATCH_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.1
                echo "PATCH_VERSION_TO=${PATCH_VERSION_TO}" >> "${GITHUB_ENV}"

                VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
                echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

                echo "Bumping to next minor version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
                echo "Bumping to next patch version: ${PATCH_VERSION_TO} in branch ${VERSION_BRANCH}"
              else
                echo "Patch Release: $PROWLER_VERSION"

                BUMP_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.$((FIX_VERSION + 1))
                echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"

                TARGET_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
                echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"

                echo "Bumping to next patch version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
              fi
            else
              echo "Releasing another Prowler major version, aborting..."
              exit 1
            fi
          else
            echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
            exit 1
          fi
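Everything in the step above hangs off a single bash regex match. The same parsing can be exercised standalone outside Actions; a sketch with a sample tag:

```bash
#!/usr/bin/env bash
# Split a release tag into major/minor/fix via BASH_REMATCH,
# mirroring the "Get Prowler version" step above.
PROWLER_VERSION="5.4.0"
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
  echo "major=${BASH_REMATCH[1]} minor=${BASH_REMATCH[2]} fix=${BASH_REMATCH[3]}"
else
  echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
  exit 1
fi
```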
      - name: Bump versions in files
        run: |
          echo "Using PROWLER_VERSION=$PROWLER_VERSION"
          echo "Using BUMP_VERSION_TO=$BUMP_VERSION_TO"

          set -e

          echo "Bumping version in pyproject.toml ..."
          sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${BUMP_VERSION_TO}\"|" pyproject.toml

          echo "Bumping version in prowler/config/config.py ..."
          sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${BUMP_VERSION_TO}\"|" prowler/config/config.py

          echo "Bumping version in .env ..."
          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${BUMP_VERSION_TO}|" .env

          git --no-pager diff
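One caveat with this approach: `sed -i` exits 0 even when its pattern matches nothing, so a bump that silently fails to apply would still pass. A post-check that would catch that, sketched here as an illustration rather than part of the workflow:

```bash
# Fail loudly if the new version string did not land in pyproject.toml.
grep -q "version = \"${BUMP_VERSION_TO}\"" pyproject.toml \
  || { echo "Version bump did not apply to pyproject.toml" >&2; exit 1; }
```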
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.TARGET_BRANCH }}
          commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
          branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
          title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
          body: |
            ### Description

            Bump Prowler version to v${{ env.BUMP_VERSION_TO }}

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

      - name: Handle patch version for minor release
        if: env.FIX_VERSION == '0'
        run: |
          echo "Using PROWLER_VERSION=$PROWLER_VERSION"
          echo "Using PATCH_VERSION_TO=$PATCH_VERSION_TO"

          set -e

          echo "Bumping version in pyproject.toml ..."
          sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${PATCH_VERSION_TO}\"|" pyproject.toml

          echo "Bumping version in prowler/config/config.py ..."
          sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${PATCH_VERSION_TO}\"|" prowler/config/config.py

          echo "Bumping version in .env ..."
          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PATCH_VERSION_TO}|" .env

          git --no-pager diff

      - name: Create Pull Request for patch version
        if: env.FIX_VERSION == '0'
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
          branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
          title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
          body: |
            ### Description

            Bump Prowler version to v${{ env.PATCH_VERSION_TO }}

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
.github/workflows/sdk-codeql.yml (vendored, new file, 67 lines)

@@ -0,0 +1,67 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: SDK - CodeQL

on:
  push:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
    paths-ignore:
      - 'ui/**'
      - 'api/**'
      - '.github/**'
  pull_request:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
    paths-ignore:
      - 'ui/**'
      - 'api/**'
      - '.github/**'
  schedule:
    - cron: '00 12 * * *'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/sdk-codeql-config.yml

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/sdk-pull-request.yml (vendored, new file, 219 lines)

@@ -0,0 +1,219 @@
name: SDK - Pull Request

on:
  push:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
  pull_request:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.9", "3.10", "3.11", "3.12"]

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Test if changes are in not ignored paths
        id: are-non-ignored-files-changed
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: ./**
          files_ignore: |
            .github/**
            docs/**
            permissions/**
            api/**
            ui/**
            prowler/CHANGELOG.md
            README.md
            mkdocs.yml
            .backportrc.json
            .env
            docker-compose*
            examples/**
            .gitignore

      - name: Install poetry
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          python -m pip install --upgrade pip
          pipx install poetry==2.1.1

      - name: Set up Python ${{ matrix.python-version }}
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"

      - name: Install dependencies
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry install --no-root
          poetry run pip list
          VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
            grep '"tag_name":' | \
            sed -E 's/.*"v([^"]+)".*/\1/' \
          ) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
          && chmod +x /tmp/hadolint

      - name: Poetry check
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry check --lock

      - name: Lint with flake8
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api

      - name: Checking format with black
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run black --exclude api ui --check .

      - name: Lint with pylint
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/

      - name: Bandit
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .

      - name: Safety
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run safety check --ignore 70612 -r pyproject.toml

      - name: Vulture
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .

      - name: Hadolint
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          /tmp/hadolint Dockerfile --ignore=DL3013

      # Test AWS
      - name: AWS - Check if any file has changed
        id: aws-changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            ./prowler/providers/aws/**
            ./tests/providers/aws/**
            .poetry.lock

      - name: AWS - Test
        if: steps.aws-changed-files.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws

      # Test Azure
      - name: Azure - Check if any file has changed
        id: azure-changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            ./prowler/providers/azure/**
            ./tests/providers/azure/**
            .poetry.lock

      - name: Azure - Test
        if: steps.azure-changed-files.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure

      # Test GCP
      - name: GCP - Check if any file has changed
        id: gcp-changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            ./prowler/providers/gcp/**
            ./tests/providers/gcp/**
            .poetry.lock

      - name: GCP - Test
        if: steps.gcp-changed-files.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp

      # Test Kubernetes
      - name: Kubernetes - Check if any file has changed
        id: kubernetes-changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            ./prowler/providers/kubernetes/**
            ./tests/providers/kubernetes/**
            .poetry.lock

      - name: Kubernetes - Test
        if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes

      # Test NHN
      - name: NHN - Check if any file has changed
        id: nhn-changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            ./prowler/providers/nhn/**
            ./tests/providers/nhn/**
            .poetry.lock

      - name: NHN - Test
        if: steps.nhn-changed-files.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn

      # Test M365
      - name: M365 - Check if any file has changed
        id: m365-changed-files
        uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
        with:
          files: |
            ./prowler/providers/m365/**
            ./tests/providers/m365/**
            .poetry.lock

      - name: M365 - Test
        if: steps.m365-changed-files.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365

      # Common Tests
      - name: Lib - Test
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib

      - name: Config - Test
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config

      # Codecov
      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          flags: prowler
          files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./lib_coverage.xml,./config_coverage.xml
@@ -1,4 +1,4 @@
-name: pypi-release
+name: SDK - PyPI release

on:
  release:
@@ -7,15 +7,43 @@ on:
env:
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  PYTHON_VERSION: 3.11
-  CACHE: "poetry"
+  # CACHE: "poetry"

jobs:
+  repository-check:
+    name: Repository check
+    runs-on: ubuntu-latest
+    outputs:
+      is_repo: ${{ steps.repository_check.outputs.is_repo }}
+    steps:
+      - name: Repository check
+        id: repository_check
+        working-directory: /tmp
+        run: |
+          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
+          then
+            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
+          else
+            echo "This action only runs for prowler-cloud/prowler"
+            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
+          fi

  release-prowler-job:
    runs-on: ubuntu-latest
+    needs: repository-check
+    if: needs.repository-check.outputs.is_repo == 'true'
    env:
      POETRY_VIRTUALENVS_CREATE: "false"
    name: Release Prowler to PyPI
    steps:
      - name: Repository check
        working-directory: /tmp
        run: |
          if [[ "${{ github.repository }}" != "prowler-cloud/prowler" ]]; then
            echo "This action only runs for prowler-cloud/prowler"
            exit 1
          fi

      - name: Get Prowler version
        run: |
          PROWLER_VERSION="${{ env.RELEASE_TAG }}"
@@ -27,23 +55,26 @@ jobs:
          4)
            echo "Releasing Prowler v4 with tag ${PROWLER_VERSION}"
            ;;
+          5)
+            echo "Releasing Prowler v5 with tag ${PROWLER_VERSION}"
+            ;;
          *)
            echo "Releasing another Prowler major version, aborting..."
            exit 1
            ;;
          esac

-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Install dependencies
        run: |
-          pipx install poetry
+          pipx install poetry==2.1.1

      - name: Setup Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
-          cache: ${{ env.CACHE }}
+          # cache: ${{ env.CACHE }}

      - name: Build Prowler package
        run: |
@@ -1,10 +1,10 @@
# This is a basic workflow to help you get started with Actions

-name: Refresh regions of AWS services
+name: SDK - Refresh AWS services' regions

on:
  schedule:
-    - cron: "0 9 * * *" # runs at 09:00 UTC every day
+    - cron: "0 9 * * 1" # runs at 09:00 UTC every Monday

env:
  GITHUB_BRANCH: "master"
@@ -23,12 +23,12 @@ jobs:
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          ref: ${{ env.GITHUB_BRANCH }}

      - name: setup python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.9 # install the Python version needed
@@ -38,7 +38,7 @@ jobs:
          pip install boto3

      - name: Configure AWS Credentials -- DEV
-        uses: aws-actions/configure-aws-credentials@v4
+        uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
        with:
          aws-region: ${{ env.AWS_REGION_DEV }}
          role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
@@ -50,12 +50,13 @@

      # Create pull request
      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v7
+        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          commit-message: "feat(regions_update): Update regions for AWS services"
          branch: "aws-services-regions-updated-${{ github.sha }}"
-          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
+          labels: "status/waiting-for-revision, severity/low, provider/aws"
          title: "chore(regions_update): Changes in regions for AWS services"
          body: |
            ### Description
.github/workflows/ui-build-lint-push-containers.yml (vendored, new file, 118 lines)

@@ -0,0 +1,118 @@
name: UI - Build and Push containers

on:
  push:
    branches:
      - "master"
    paths:
      - "ui/**"
      - ".github/workflows/ui-build-lint-push-containers.yml"

  # Uncomment the below code to test this action on PRs
  # pull_request:
  #   branches:
  #     - "master"
  #   paths:
  #     - "ui/**"
  #     - ".github/workflows/ui-build-lint-push-containers.yml"

  release:
    types: [published]

env:
  # Tags
  LATEST_TAG: latest
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  STABLE_TAG: stable

  WORKING_DIRECTORY: ./ui

  # Container Registries
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui

jobs:
  repository-check:
    name: Repository check
    runs-on: ubuntu-latest
    outputs:
      is_repo: ${{ steps.repository_check.outputs.is_repo }}
    steps:
      - name: Repository check
        id: repository_check
        working-directory: /tmp
        run: |
          if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
          then
            echo "is_repo=true" >> "${GITHUB_OUTPUT}"
          else
            echo "This action only runs for prowler-cloud/prowler"
            echo "is_repo=false" >> "${GITHUB_OUTPUT}"
          fi

  # Build Prowler OSS container
  container-build-push:
    needs: repository-check
    if: needs.repository-check.outputs.is_repo == 'true'
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ env.WORKING_DIRECTORY }}

    steps:
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Set short git commit SHA
        id: vars
        run: |
          shortSha=$(git rev-parse --short ${{ github.sha }})
          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV

      - name: Login to DockerHub
        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          build-args: |
            NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
          # Set push: false for testing
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          build-args: |
            NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Trigger deployment
        if: github.event_name == 'push'
        uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
        with:
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          repository: ${{ secrets.CLOUD_DISPATCH }}
          event-type: prowler-ui-deploy
          client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'
.github/workflows/ui-codeql.yml (vendored, new file, 59 lines)

@@ -0,0 +1,59 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: UI - CodeQL

on:
  push:
    branches:
      - "master"
      - "v5.*"
    paths:
      - "ui/**"
  pull_request:
    branches:
      - "master"
      - "v5.*"
    paths:
      - "ui/**"
  schedule:
    - cron: "00 12 * * *"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: ["javascript"]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/ui-codeql-config.yml

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/ui-pull-request.yml (vendored, new file, 62 lines)

@@ -0,0 +1,62 @@
name: UI - Pull Request

on:
  push:
    branches:
      - "master"
      - "v5.*"
    paths:
      - ".github/workflows/ui-pull-request.yml"
      - "ui/**"
  pull_request:
    branches:
      - master
      - "v5.*"
    paths:
      - 'ui/**'
env:
  UI_WORKING_DIR: ./ui
  IMAGE_NAME: prowler-ui

jobs:
  test-and-coverage:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        os: [ubuntu-latest]
        node-version: [20.x]
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
      - name: Setup Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
        with:
          node-version: ${{ matrix.node-version }}
      - name: Install dependencies
        working-directory: ./ui
        run: npm install
      - name: Run Healthcheck
        working-directory: ./ui
        run: npm run healthcheck
      - name: Build the application
        working-directory: ./ui
        run: npm run build
  test-container-build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
      - name: Build Container
        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
        with:
          context: ${{ env.UI_WORKING_DIR }}
          # Always build using `prod` target
          target: prod
          push: false
          tags: ${{ env.IMAGE_NAME }}:latest
          outputs: type=docker
          build-args: |
            NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
.gitignore (vendored, 14 lines changed)

@@ -12,6 +12,7 @@ build/
/dist/
*.egg-info/
*/__pycache__/*.pyc
+.idea/

# Session
Session.vim
@@ -30,7 +31,7 @@ tags
*.DS_Store

# Prowler output
-output/
+/output

# Prowler found secrets
secrets-*/
@@ -41,12 +42,18 @@ junit-reports/
# VSCode files
.vscode/

+# Cursor files
+.cursorignore

# Terraform
.terraform*
*.tfstate
*.tfstate.*

# .env
-.env*
+ui/.env*
+api/.env*
+.env.local

# Coverage
.coverage*
@@ -55,3 +62,6 @@ coverage*

# Node
node_modules

+# Persistent data
+_data/
@@ -27,6 +27,7 @@ repos:
    hooks:
      - id: shellcheck
        exclude: contrib

+## PYTHON
  - repo: https://github.com/myint/autoflake
    rev: v2.3.1
@@ -58,11 +59,28 @@ repos:
        args: ["--ignore=E266,W503,E203,E501,W605"]

  - repo: https://github.com/python-poetry/poetry
-    rev: 1.8.0
+    rev: 2.1.1
    hooks:
      - id: poetry-check
+        name: API - poetry-check
+        args: ["--directory=./api"]
+        pass_filenames: false

      - id: poetry-lock
-        args: ["--no-update"]
+        name: API - poetry-lock
+        args: ["--directory=./api"]
+        pass_filenames: false

+      - id: poetry-check
+        name: SDK - poetry-check
+        args: ["--directory=./"]
+        pass_filenames: false

+      - id: poetry-lock
+        name: SDK - poetry-lock
+        args: ["--directory=./"]
+        pass_filenames: false
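With the hooks now split between API and SDK directories, each can be exercised on its own outside a commit. A minimal sketch, assuming `pre-commit` is installed locally:

```bash
# Run only the poetry hooks against the whole repository.
pre-commit run poetry-check --all-files
pre-commit run poetry-lock --all-files
```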
  - repo: https://github.com/hadolint/hadolint
    rev: v2.13.0-beta
@@ -90,19 +108,19 @@ repos:
      - id: bandit
        name: bandit
        description: "Bandit is a tool for finding common security issues in Python code"
-        entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/' -r .'
+        entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
        language: system
        files: '.*\.py'

      - id: safety
        name: safety
        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
-        entry: bash -c 'safety check --ignore 70612'
+        entry: bash -c 'safety check --ignore 70612,66963,74429'
        language: system

      - id: vulture
        name: vulture
        description: "Vulture finds unused code in Python programs."
-        entry: bash -c 'vulture --exclude "contrib" --min-confidence 100 .'
+        entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/" --min-confidence 100 .'
        language: system
        files: '.*\.py'
Dockerfile (64 lines changed)

@@ -1,38 +1,64 @@
-FROM python:3.12-alpine
+FROM python:3.12.10-slim-bookworm AS build

LABEL maintainer="https://github.com/prowler-cloud/prowler"
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"

-# Update system dependencies and install essential tools
-#hadolint ignore=DL3018
-RUN apk --no-cache upgrade && apk --no-cache add curl git
+ARG POWERSHELL_VERSION=7.5.0

+# hadolint ignore=DL3008
+RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
+    && rm -rf /var/lib/apt/lists/*

+# Install PowerShell
+RUN ARCH=$(uname -m) && \
+    if [ "$ARCH" = "x86_64" ]; then \
+        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
+    elif [ "$ARCH" = "aarch64" ]; then \
+        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
+    else \
+        echo "Unsupported architecture: $ARCH" && exit 1 ; \
+    fi && \
+    mkdir -p /opt/microsoft/powershell/7 && \
+    tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
+    chmod +x /opt/microsoft/powershell/7/pwsh && \
+    ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
+    rm /tmp/powershell.tar.gz
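The arch branching above maps `uname -m` values onto PowerShell's release asset names. The same mapping, condensed into a `case` statement as an illustrative sketch rather than the form used in the Dockerfile:

```bash
# Resolve the PowerShell tarball suffix for the current machine.
case "$(uname -m)" in
  x86_64)  PS_ARCH=x64 ;;
  aarch64) PS_ARCH=arm64 ;;
  *) echo "Unsupported architecture: $(uname -m)" >&2; exit 1 ;;
esac
echo "powershell-7.5.0-linux-${PS_ARCH}.tar.gz"
```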
+# Add prowler user
+RUN addgroup --gid 1000 prowler && \
+    adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler

-# Create non-root user
-RUN mkdir -p /home/prowler && \
-    echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
-    echo 'prowler:x:1000:' > /etc/group && \
-    chown -R prowler:prowler /home/prowler
USER prowler

-# Copy necessary files
WORKDIR /home/prowler

+# Copy necessary files
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
-COPY README.md /home/prowler
+COPY README.md /home/prowler/
+COPY prowler/providers/m365/lib/powershell/m365_powershell.py /home/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py

# Install Python dependencies
ENV HOME='/home/prowler'
-ENV PATH="$HOME/.local/bin:$PATH"
-RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
-    pip install --no-cache-dir .
+ENV PATH="${HOME}/.local/bin:${PATH}"
+#hadolint ignore=DL3013
+RUN pip install --no-cache-dir --upgrade pip && \
+    pip install --no-cache-dir poetry

+# By default poetry does not compile Python source files to bytecode during installation.
+# This speeds up the installation process, but the first execution may take a little more
+# time because Python then compiles source files to bytecode automatically. If you want to
+# compile source files to bytecode during installation, you can use the --compile option
+RUN poetry install --compile && \
+    rm -rf ~/.cache/pip

+# Install PowerShell modules
+RUN poetry run python prowler/providers/m365/lib/powershell/m365_powershell.py

# Remove deprecated dash dependencies
RUN pip uninstall dash-html-components -y && \
    pip uninstall dash-core-components -y

+# Remove Prowler directory and build files
+USER 0
+RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info

+USER prowler
-ENTRYPOINT ["prowler"]
+ENTRYPOINT ["poetry", "run", "prowler"]
README.md (166 lines changed)

@@ -3,7 +3,7 @@
  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
-<b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they're meant to protect. Trusted by the leaders in security.
+<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment it's meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
@@ -29,7 +29,7 @@
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
-<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler?include_prereleases"></a>
+<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
@@ -43,7 +43,15 @@

# Description

-**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
+**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.

+## Prowler App
+
+Prowler App is a web application that allows you to run Prowler in your cloud provider accounts and visualize the results in a user-friendly interface.
+
+
+
+>More details at [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)

## Prowler CLI

@@ -63,51 +71,169 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
-| AWS | 553 | 77 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
-| GCP | 77 | 13 -> `prowler gcp --list-services` | 3 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories` |
-| Azure | 138 | 17 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
-| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
+| AWS | 564 | 82 | 33 | 10 |
+| GCP | 78 | 13 | 7 | 3 |
+| Azure | 140 | 18 | 7 | 3 |
+| Kubernetes | 83 | 7 | 4 | 7 |
+| M365 | 44 | 2 | 1 | 0 |
+| NHN (Unofficial) | 6 | 2 | 1 | 0 |

+> You can list the checks, services, compliance frameworks and categories with `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.

# 💻 Installation

-## Pip package
-Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
+## Prowler App
+
+Prowler App can be installed in different ways, depending on your environment:
+
+> See how to use Prowler App in the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
+
+### Docker Compose
+
+**Requirements**
+
+* `Docker Compose` installed: https://docs.docker.com/compose/install/.
+
+**Commands**
+
+``` console
+curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/docker-compose.yml
+curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/.env
+docker compose up -d
+```
+
+> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
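For example, on an Apple Silicon or other arm64 workstation the same startup command becomes:

``` console
DOCKER_DEFAULT_PLATFORM=linux/amd64 docker compose up -d
```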
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
|
||||
### From GitHub

**Requirements**

* `git` installed.
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.

**Commands to run the API**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
set -a
source .env
docker compose up postgres valkey -d
cd src/backend
python manage.py migrate --database admin
gunicorn -c config/guniconf.py config.wsgi:application
```

> [!IMPORTANT]
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your Poetry version is below 2.0.0, you must keep using `poetry shell` to activate your environment.
> If you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment

> Now you can access the API documentation at http://localhost:8080/api/v1/docs.
**Commands to run the API Worker**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
set -a
source .env
cd src/backend
python -m celery -A config.celery worker -l info -E
```
**Commands to run the API Scheduler**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
set -a
source .env
cd src/backend
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```
**Commands to run the UI**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/ui
npm install
npm run build
npm start
```

> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
## Prowler CLI

### Pip package

Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), and thus can be installed using pip with Python > 3.9.1, < 3.13:

```console
pip install prowler
prowler -v
```

> More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
### Containers

The available versions of Prowler CLI are the following:

- `latest`: in sync with the `master` branch (bear in mind that it is not a stable version)
- `v4-latest`: in sync with the `v4` branch (bear in mind that it is not a stable version)
- `v3-latest`: in sync with the `v3` branch (bear in mind that it is not a stable version)
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases); those are stable releases.
- `stable`: this tag always points to the latest release.
- `v4-stable`: this tag always points to the latest release for v4.
- `v3-stable`: this tag always points to the latest release for v3.

The container images are available here:

- Prowler CLI:
  - [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
  - [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
- Prowler App:
  - [DockerHub - Prowler UI](https://hub.docker.com/r/prowlercloud/prowler-ui/tags)
  - [DockerHub - Prowler API](https://hub.docker.com/r/prowlercloud/prowler-api/tags)
### From GitHub

Python >= 3.9, < 3.13 is required with pip and poetry:

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler
eval $(poetry env activate)
poetry install
python prowler-cli.py -v
```

> [!IMPORTANT]
> Starting from Poetry v2.0.0, `poetry shell` has been deprecated in favor of `poetry env activate`.
>
> If your Poetry version is below 2.0.0, you must keep using `poetry shell` to activate your environment.
> If you have any doubts, consult the Poetry environment activation guide: https://python-poetry.org/docs/managing-environments/#activating-the-environment

> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture

## Prowler App

The **Prowler App** consists of three main components:

- **Prowler UI**: A user-friendly web interface for running Prowler and viewing results, powered by Next.js.
- **Prowler API**: The backend API that executes Prowler scans and stores the results, built with Django REST Framework.
- **Prowler SDK**: A Python SDK that integrates with the Prowler CLI for advanced functionality.

*(Prowler App architecture diagram)*

## Prowler CLI

You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine instance, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell, and many more.

*(Prowler CLI architecture diagram)*
**api/.env.example** (new file, 58 lines)
```
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8000
DJANGO_DEBUG=False
# Select one of [production|devel]
DJANGO_SETTINGS_MODULE=config.django.[production|devel]
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=[ndjson|human_readable]
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
DJANGO_WORKERS=4  # Defaults to the maximum available based on CPU cores if not set.
DJANGO_TOKEN_SIGNING_KEY=""
DJANGO_TOKEN_VERIFYING_KEY=""
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_SECRETS_ENCRYPTION_KEY=""
# Decide whether to allow Django to manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=

# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=[localhost|postgres-db]
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler
POSTGRES_ADMIN_PASSWORD=S3cret
POSTGRES_USER=prowler_user
POSTGRES_PASSWORD=S3cret
POSTGRES_DB=prowler_db

# Valkey settings
# If running django and celery on host, use 'localhost', else use 'valkey'
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0

# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

# Social login credentials
DJANGO_GOOGLE_OAUTH_CLIENT_ID=""
DJANGO_GOOGLE_OAUTH_CLIENT_SECRET=""
DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""

DJANGO_GITHUB_OAUTH_CLIENT_ID=""
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""

# Deletion Task Batch Size
DJANGO_DELETION_BATCH_SIZE=5000
```
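The empty `DJANGO_TOKEN_SIGNING_KEY` / `DJANGO_TOKEN_VERIFYING_KEY` pair suggests JWTs are signed with an asymmetric key pair. As a hedged illustration only (the algorithm and key size expected by the API are assumptions, not taken from this file), a PEM pair can be generated with the `cryptography` package:

```python
# Illustrative only: generate a PEM key pair that could populate
# DJANGO_TOKEN_SIGNING_KEY / DJANGO_TOKEN_VERIFYING_KEY.
# The RSA choice and the 2048-bit size are assumptions.
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

signing_pem = key.private_bytes(
    serialization.Encoding.PEM,
    serialization.PrivateFormat.PKCS8,
    serialization.NoEncryption(),
).decode()

verifying_pem = key.public_key().public_bytes(
    serialization.Encoding.PEM,
    serialization.PublicFormat.SubjectPublicKeyInfo,
).decode()

print(signing_pem)
print(verifying_pem)
```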
**api/.gitignore** (new file, vendored, 168 lines)
```
# Byte-compiled / optimized / DLL files
__pycache__/
*.pyc
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
/_data/

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
*.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/

# VSCode
.vscode/
```
**api/.pre-commit-config.yaml** (new file, 91 lines)
```yaml
repos:
  ## GENERAL
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.6.0
    hooks:
      - id: check-merge-conflict
      - id: check-yaml
        args: ["--unsafe"]
      - id: check-json
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: no-commit-to-branch
      - id: pretty-format-json
        args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
        exclude: 'src/backend/api/fixtures/dev/.*\.json$'

  ## TOML
  - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
    rev: v2.13.0
    hooks:
      - id: pretty-format-toml
        args: [--autofix]
        files: pyproject.toml

  ## BASH
  - repo: https://github.com/koalaman/shellcheck-precommit
    rev: v0.10.0
    hooks:
      - id: shellcheck
        exclude: contrib

  ## PYTHON
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.5.0
    hooks:
      # Run the linter.
      - id: ruff
        args: [--fix]
      # Run the formatter.
      - id: ruff-format

  - repo: https://github.com/python-poetry/poetry
    rev: 1.8.0
    hooks:
      - id: poetry-check
        args: ["--directory=src"]
      - id: poetry-lock
        args: ["--no-update", "--directory=src"]

  - repo: https://github.com/hadolint/hadolint
    rev: v2.13.0-beta
    hooks:
      - id: hadolint
        args: ["--ignore=DL3013", "Dockerfile"]

  - repo: local
    hooks:
      - id: pylint
        name: pylint
        entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
        language: system
        files: '.*\.py'

      - id: trufflehog
        name: TruffleHog
        description: Detect secrets in your data.
        entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
        # For running trufflehog in docker, use the following entry instead:
        # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
        language: system
        stages: ["commit", "push"]

      - id: bandit
        name: bandit
        description: "Bandit is a tool for finding common security issues in Python code"
        entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
        language: system
        files: '.*\.py'

      - id: safety
        name: safety
        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
        entry: bash -c 'poetry run safety check --ignore 70612,66963,74429'
        language: system

      - id: vulture
        name: vulture
        description: "Vulture finds unused code in Python programs."
        entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
        language: system
        files: '.*\.py'
```
**api/CHANGELOG.md** (new file, 84 lines)
# Prowler API Changelog

All notable changes to the **Prowler API** are documented in this file.

## [v1.7.0] (Prowler v5.6.0)

### Added

- Added M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563).
- Added a `compliance/` folder and ZIP-export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
- Added a new API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).

---

## [v1.6.0] (Prowler v5.5.0)

### Added

- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289).
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333).
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378).
- Added missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318).

---

## [v1.5.4] (Prowler v5.4.4)

### Fixed

- Fixed a bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466).

---

## [v1.5.3] (Prowler v5.4.3)

### Fixed

- Added duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401).
- Added an environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423).

---

## [v1.5.2] (Prowler v5.4.2)

### Changed

- Refactored deletion logic and implemented a retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).

---

## [v1.5.1] (Prowler v5.4.1)

### Fixed

- Added a handled response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183).
- Fixed a race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172).
- Handled the exception raised when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283).

### Added

- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).

---

## [v1.5.0] (Prowler v5.4.0)

### Added

- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906).
- Added an API scan report system; all scans launched from the API now generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878).
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874).

### Changed

- Optimized the `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019).

---

## [v1.4.0] (Prowler v5.3.0)

### Changed

- Daily scheduled scan instances are now created beforehand with the `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
- The findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).

---
**api/Dockerfile** (new file, 62 lines)
```dockerfile
FROM python:3.12.10-slim-bookworm AS build

LABEL maintainer="https://github.com/prowler-cloud/api"

ARG POWERSHELL_VERSION=7.5.0
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}

# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
    && rm -rf /var/lib/apt/lists/*

# Install PowerShell
RUN ARCH=$(uname -m) && \
    if [ "$ARCH" = "x86_64" ]; then \
        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
    elif [ "$ARCH" = "aarch64" ]; then \
        wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
    else \
        echo "Unsupported architecture: $ARCH" && exit 1 ; \
    fi && \
    mkdir -p /opt/microsoft/powershell/7 && \
    tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
    chmod +x /opt/microsoft/powershell/7/pwsh && \
    ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
    rm /tmp/powershell.tar.gz

# Add prowler user
RUN addgroup --gid 1000 prowler && \
    adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler

USER prowler

WORKDIR /home/prowler

COPY pyproject.toml ./

RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry

COPY src/backend/ ./backend/

ENV PATH="/home/prowler/.local/bin:$PATH"

# Add `--no-root` to avoid installing the current project as a package
RUN poetry install --no-root && \
    rm -rf ~/.cache/pip

COPY docker-entrypoint.sh ./docker-entrypoint.sh

RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"

WORKDIR /home/prowler/backend

# Development image
FROM build AS dev

ENTRYPOINT ["../docker-entrypoint.sh", "dev"]

# Production image
FROM build

ENTRYPOINT ["../docker-entrypoint.sh", "prod"]
```
**api/README.md** (new file, 335 lines)
# Description

This repository contains the JSON API and Task Runner components for Prowler, which together form a complete backend that interacts with the Prowler SDK and is used by the Prowler UI.

# Components

The Prowler API is composed of the following components:

- The JSON API, an API built with Django REST Framework.
- The Celery worker, responsible for executing the background tasks defined in the JSON API.
- The PostgreSQL database, used to store the data.
- The Valkey database, an in-memory datastore used as a message broker for the Celery workers.

## Note about Valkey

[Valkey](https://valkey.io/) is an open source (BSD) high-performance key/value datastore.

Valkey exposes a Redis 7.2 compliant API, so any service that exposes the Redis API can be used with the Prowler API.
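Because Valkey speaks the Redis protocol, the broker is addressed with a standard `redis://` URL. A minimal sketch of how the `VALKEY_*` variables from `.env.example` could be assembled into a Celery broker URL (the actual settings module is not shown here, so only the variable names are taken from the project):

```python
# Sketch: build a Celery broker URL from the VALKEY_* environment variables.
# Any Redis-compatible endpoint (Valkey included) is configured the same way.
import os

broker_url = "redis://{host}:{port}/{db}".format(
    host=os.getenv("VALKEY_HOST", "localhost"),
    port=os.getenv("VALKEY_PORT", "6379"),
    db=os.getenv("VALKEY_DB", "0"),
)
print(broker_url)  # e.g. redis://localhost:6379/0
```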
# Modify environment variables

Under the root path of the project you can find a file called `.env.example`. This file shows all the environment variables that the project uses. You *must* create a new file called `.env` and set values for these variables.

## Local deployment

Keep in mind that if you export the `.env` file to use it with a local deployment, you have to do it within the context of the Poetry interpreter, not before; otherwise, the variables will not be loaded properly.

To do this, you can run:

```console
poetry shell
set -a
source .env
```

# 🚀 Production deployment

## Docker deployment

This method requires `docker` and `docker compose`.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Build the base image

```console
docker compose --profile prod build
```

### Run the production service

This command will start the Django production server, the Celery worker, and the Valkey and PostgreSQL databases.

```console
docker compose --profile prod up -d
```

You can access the server at `http://localhost:8080`.

> **NOTE:** notice how the port is different. When developing using docker, the port will be `8080` to prevent conflicts.

### View the Production Server Logs

To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```

## Local deployment

To use this method, you'll need to set up a Python virtual environment (version `>=3.11,<3.13`) and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Install all dependencies with Poetry

```console
poetry install
poetry shell
```

## Start the PostgreSQL Database and Valkey

The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.

**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.

```console
docker compose up postgres valkey -d
```

## Deploy Django and the Celery worker

### Run migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
cd src/backend
python manage.py migrate --database admin
```

### Run the Celery worker

```console
cd src/backend
python -m celery -A config.celery worker -l info -E
```

### Run the Django server with Gunicorn

```console
cd src/backend
gunicorn -c config/guniconf.py config.wsgi:application
```

> By default, the Gunicorn server will try to use as many workers as your machine can handle. You can manually change that in the `src/backend/config/guniconf.py` file.
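As a hedged sketch of what such a worker default could look like (the real `guniconf.py` may compute this differently), Gunicorn reads module-level names like `bind` and `workers` from its config file:

```python
# Hypothetical guniconf.py sketch, not the project's actual file.
# The heuristic below is the classic cpu_count * 2 + 1 rule, for illustration.
import multiprocessing

bind = "0.0.0.0:8000"
workers = multiprocessing.cpu_count() * 2 + 1
```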
# 🧪 Development guide

## Local deployment

To use this method, you'll need to set up a Python virtual environment (version `>=3.11,<3.13`) and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Start the PostgreSQL Database and Valkey

The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.

**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.

```console
docker compose up postgres valkey -d
```

### Install the Python dependencies

> You must have Poetry installed

```console
poetry install
poetry shell
```

### Apply migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
cd src/backend
python manage.py migrate --database admin
```

### Run the Django development server

```console
cd src/backend
python manage.py runserver
```

You can access the server at `http://localhost:8000`.
All changes in the code will be automatically reloaded by the server.

### Run the Celery worker

```console
python -m celery -A config.celery worker -l info -E
```

The Celery worker does not detect and reload code changes, so you need to restart it manually when you make changes.

## Docker deployment

This method requires `docker` and `docker compose`.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Build the base image

```console
docker compose --profile dev build
```

### Run the development service

This command will start the Django development server, the Celery worker, and the Valkey and PostgreSQL databases.

```console
docker compose --profile dev up -d
```

You can access the server at `http://localhost:8080`.
All changes in the code will be automatically reloaded by the server.

> **NOTE:** notice how the port is different. When developing using docker, the port will be `8080` to prevent conflicts.

### View the development server logs

To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```

## Applying migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
poetry shell
cd src/backend
python manage.py migrate --database admin
```

## Apply fixtures

Fixtures are used to populate the database with initial development data.

```console
poetry shell
cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```

> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`

## Run tests

Note that the tests will fail if you use the same `.env` file as the development environment.

For best results, run them in a new shell with no environment variables set.

```console
poetry shell
cd src/backend
pytest
```

# Custom commands

Django provides a way to create custom commands that can be run from the command line; a minimal sketch of the pattern follows the usage snippet below.

> These commands can be found in: `prowler/api/src/backend/api/management/commands`

To run a custom command, you need to be in the `prowler/api/src/backend` directory and run:

```console
poetry shell
python manage.py <command_name>
```
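A minimal sketch of what such a command looks like (a hypothetical example, not one of the API's shipped commands); a module dropped into `api/management/commands/hello.py` becomes available as `python manage.py hello`:

```python
# api/management/commands/hello.py -- hypothetical illustration of the
# Django custom-command pattern used by the commands in that directory.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Print a greeting (illustration only)"

    def add_arguments(self, parser):
        parser.add_argument("--name", default="prowler")

    def handle(self, *args, **options):
        self.stdout.write(f"Hello, {options['name']}!")
```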
## Generate dummy data

```console
python manage.py findings --tenant <TENANT_ID> --findings <NUM_FINDINGS> --resources <NUM_RESOURCES> --batch <TRANSACTION_BATCH_SIZE> --alias <ALIAS>
```

This command creates, for a given tenant, a provider, a scan, and a set of related findings and resources.

> Scan progress and state are updated in real time.
> - 0-33%: Create resources.
> - 33-66%: Create findings.
> - 66%: Create the resource-finding mapping.
>
> The last step is required to access the finding details, since the UI needs that mapping to print all the information.

### Example

```console
~/backend $ poetry run python manage.py findings --tenant fffb1893-3fc7-4623-a5d9-fae47da1c528 --findings 25000 --resources 1000 --batch 5000 --alias test-script

Starting data population
Tenant: fffb1893-3fc7-4623-a5d9-fae47da1c528
Alias: test-script
Resources: 1000
Findings: 25000
Batch size: 5000


Creating resources...
100%|███████████████████████| 1/1 [00:00<00:00, 7.72it/s]
Resources created successfully.


Creating findings...
100%|███████████████████████| 5/5 [00:05<00:00, 1.09s/it]
Findings created successfully.


Creating resource-finding mappings...
100%|███████████████████████| 5/5 [00:02<00:00, 1.81it/s]
Resource-finding mappings created successfully.


Successfully populated test data.
```
**api/docker-compose.yml** (new file, 125 lines)
```yaml
services:
  api:
    build:
      dockerfile: Dockerfile
    image: prowler-api
    env_file:
      - path: ./.env
        required: false
    ports:
      - "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
    profiles:
      - prod
    depends_on:
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "prod"

  api-dev:
    build:
      dockerfile: Dockerfile
      target: dev
    image: prowler-api-dev
    environment:
      - DJANGO_SETTINGS_MODULE=config.django.devel
      - DJANGO_LOGGING_FORMATTER=human_readable
    env_file:
      - path: ./.env
        required: false
    ports:
      - "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
    volumes:
      - "./src/backend:/home/prowler/backend"
      - "./pyproject.toml:/home/prowler/pyproject.toml"
    profiles:
      - dev
    depends_on:
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "dev"

  postgres:
    image: postgres:16.3-alpine
    ports:
      - "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
    hostname: "postgres-db"
    volumes:
      - ./_data/postgres:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
      - POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
      - POSTGRES_DB=${POSTGRES_DB:-prowler_db}
    env_file:
      - path: ./.env
        required: false
    healthcheck:
      test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
      interval: 5s
      timeout: 5s
      retries: 5

  valkey:
    image: valkey/valkey:7-alpine3.19
    ports:
      - "${VALKEY_PORT:-6379}:6379"
    hostname: "valkey"
    volumes:
      - ./_data/valkey:/data
    env_file:
      - path: ./.env
        required: false
    healthcheck:
      test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
      interval: 10s
      timeout: 5s
      retries: 3

  worker:
    build:
      dockerfile: Dockerfile
    image: prowler-worker
    environment:
      - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
    env_file:
      - path: ./.env
        required: false
    profiles:
      - dev
      - prod
    depends_on:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "worker"

  worker-beat:
    build:
      dockerfile: Dockerfile
    image: prowler-worker
    environment:
      - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
    env_file:
      - path: ./.env
        required: false
    profiles:
      - dev
      - prod
    depends_on:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "beat"
```
**api/docker-entrypoint.sh** (new executable file, 71 lines)
```shell
#!/bin/sh


apply_migrations() {
    echo "Applying database migrations..."
    poetry run python manage.py migrate --database admin
}

apply_fixtures() {
    echo "Applying Django fixtures..."
    for fixture in api/fixtures/dev/*.json; do
        if [ -f "$fixture" ]; then
            echo "Loading $fixture"
            poetry run python manage.py loaddata "$fixture" --database admin
        fi
    done
}

start_dev_server() {
    echo "Starting the development server..."
    poetry run python manage.py runserver 0.0.0.0:"${DJANGO_PORT:-8080}"
}

start_prod_server() {
    echo "Starting the Gunicorn server..."
    poetry run gunicorn -c config/guniconf.py config.wsgi:application
}

start_worker() {
    echo "Starting the worker..."
    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion -E --max-tasks-per-child 1
}

start_worker_beat() {
    echo "Starting the worker-beat..."
    sleep 15
    poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
}

manage_db_partitions() {
    if [ "${DJANGO_MANAGE_DB_PARTITIONS}" = "True" ]; then
        echo "Managing DB partitions..."
        # For now we skip the deletion of partitions until we define the data retention policy
        # --yes auto approves the operation without the need of an interactive terminal
        poetry run python manage.py pgpartition --using admin --skip-delete --yes
    fi
}

case "$1" in
    dev)
        apply_migrations
        apply_fixtures
        manage_db_partitions
        start_dev_server
        ;;
    prod)
        apply_migrations
        manage_db_partitions
        start_prod_server
        ;;
    worker)
        start_worker
        ;;
    beat)
        start_worker_beat
        ;;
    *)
        echo "Usage: $0 {dev|prod|worker|beat}"
        exit 1
        ;;
esac
```
**api/docs/partitions.md** (new file, 65 lines)
# Partitions

## Overview

Partitions are used to split the data in a table into smaller chunks, allowing for more efficient querying and storage.

The Prowler API uses partitions to store findings. The partitions are created based on the UUIDv7 `id` field, which embeds a millisecond timestamp, so id ranges map directly to time ranges (see the sketch below).
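A short sketch (not the API's own code) of why UUIDv7 ids make time-range partitioning possible: the first 48 bits of a UUIDv7 encode a Unix timestamp in milliseconds, so all findings for a month fall into one contiguous id range. The boundary values below line up with the `from_values`/`to_values` shown in the sample output later in this document:

```python
# Sketch: compute the lowest UUIDv7-style boundary value for a given instant.
# Bits 127..80 hold the millisecond timestamp; the version nibble is set to 7
# and all remaining bits are zeroed, which is fine for a range boundary.
from datetime import datetime, timezone
from uuid import UUID


def uuid7_boundary(dt: datetime) -> UUID:
    ms = int(dt.timestamp() * 1000)
    return UUID(int=(ms << 80) | (0x7 << 76))


nov = uuid7_boundary(datetime(2024, 11, 1, tzinfo=timezone.utc))
dec = uuid7_boundary(datetime(2024, 12, 1, tzinfo=timezone.utc))
print(nov)  # 0192e505-9000-7000-0000-000000000000 -> November lower bound
print(dec)  # 01937f84-5800-7000-0000-000000000000 -> December lower bound
```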
You can use the Prowler API without ever creating additional partitions. This documentation is only relevant if you want to manage partitions to gain additional query performance.

### Required Postgres Configuration

There are three configuration options that need to be set in the `postgresql.conf` file to get the most performance out of the partitioning:

- `enable_partition_pruning = on` (default is on)
- `enable_partitionwise_join = on` (default is off)
- `enable_partitionwise_aggregate = on` (default is off)

For more information on these options, see the [Postgres documentation](https://www.postgresql.org/docs/current/runtime-config-query.html).

## Partitioning Strategy

The partitioning strategy is defined in the `api.partitions` module. The strategy is responsible for creating and deleting partitions based on the provided configuration.

## Managing Partitions

The application will run without any extra work on your part. If you want to add or delete partitions, run `python manage.py pgpartition --using admin`.

This command will generate a list of partitions to create and delete based on the provided configuration.

By default, the command will prompt you to accept the changes before applying them.

```shell
Finding:
  + 2024_nov
      name: 2024_nov
      from_values: 0192e505-9000-72c8-a47c-cce719d8fb93
      to_values: 01937f84-5418-7eb8-b2a6-e3be749e839d
      size_unit: months
      size_value: 1
  + 2024_dec
      name: 2024_dec
      from_values: 01937f84-5800-7b55-879c-9cdb46f023f6
      to_values: 01941f29-7818-7f9f-b4be-20b05bb2f574
      size_unit: months
      size_value: 1

0 partitions will be deleted
2 partitions will be created
```

If you choose to apply the changes, tables will be generated with the following format: `<table_name>_<year>_<month>`.

For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra

### Changing the Partitioning Parameters

There are four environment variables that can be used to change the partitioning parameters:

- `DJANGO_MANAGE_DB_PARTITIONS`: Allow Django to manage database partitions. By default, it is set to `False`.
- `FINDINGS_TABLE_PARTITION_MONTHS`: Set the number of months for each partition. Setting the partition months to 1 will create partitions with a size of one natural month.
- `FINDINGS_TABLE_PARTITION_COUNT`: Set the number of partitions to create.
- `FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS`: Set the number of months to keep partitions before deleting them. Setting this to `None` will keep partitions indefinitely.
**api/poetry.lock** (new generated file, 5486 lines; diff suppressed because it is too large)
**api/pyproject.toml** (new file, 61 lines)
```toml
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]

[project]
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
  "celery[pytest] (>=5.4.0,<6.0.0)",
  "dj-rest-auth[with_social,jwt] (==7.0.1)",
  "django==5.1.8",
  "django-allauth==65.4.1",
  "django-celery-beat (>=2.7.0,<3.0.0)",
  "django-celery-results (>=2.5.1,<3.0.0)",
  "django-cors-headers==4.4.0",
  "django-environ==0.11.2",
  "django-filter==24.3",
  "django-guid==3.5.0",
  "django-postgres-extra (>=2.0.8,<3.0.0)",
  "djangorestframework==3.15.2",
  "djangorestframework-jsonapi==7.0.2",
  "djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
  "drf-nested-routers (>=0.94.1,<1.0.0)",
  "drf-spectacular==0.27.2",
  "drf-spectacular-jsonapi==0.5.1",
  "gunicorn==23.0.0",
  "prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.6",
  "psycopg2-binary==2.9.9",
  "pytest-celery[redis] (>=1.0.1,<2.0.0)",
  "sentry-sdk[django] (>=2.20.0,<3.0.0)",
  "uuid6==2024.7.10"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.7.0"

[project.scripts]
celery = "src.backend.config.settings.celery"

[tool.poetry.group.dev.dependencies]
bandit = "1.7.9"
coverage = "7.5.4"
django-silk = "5.3.2"
docker = "7.1.0"
freezegun = "1.5.1"
marshmallow = ">=3.15.0,<4.0.0"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "8.2.2"
pytest-cov = "5.0.0"
pytest-django = "4.8.0"
pytest-env = "1.1.3"
pytest-randomly = "3.15.0"
pytest-xdist = "3.6.1"
ruff = "0.5.0"
safety = "3.2.9"
tqdm = "4.67.1"
vulture = "2.14"
```
**api/src/backend/__init__.py** (new empty file)

**api/src/backend/api/__init__.py** (new empty file)
**api/src/backend/api/adapters.py** (new file, 61 lines)
```python
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.db import transaction

from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.models import Membership, Role, Tenant, User, UserRoleRelationship


class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
    @staticmethod
    def get_user_by_email(email: str):
        try:
            return User.objects.get(email=email)
        except User.DoesNotExist:
            return None

    def pre_social_login(self, request, sociallogin):
        # Link existing accounts with the same email address
        email = sociallogin.account.extra_data.get("email")
        if email:
            existing_user = self.get_user_by_email(email)
            if existing_user:
                sociallogin.connect(request, existing_user)

    def save_user(self, request, sociallogin, form=None):
        """
        Called after the user data is fully populated from the provider
        and is about to be saved to the DB for the first time.
        """
        with transaction.atomic(using=MainRouter.admin_db):
            user = super().save_user(request, sociallogin, form)
            user.save(using=MainRouter.admin_db)
            social_account_name = sociallogin.account.extra_data.get("name")
            if social_account_name:
                user.name = social_account_name
                user.save(using=MainRouter.admin_db)

            tenant = Tenant.objects.using(MainRouter.admin_db).create(
                name=f"{user.email.split('@')[0]} default tenant"
            )
            with rls_transaction(str(tenant.id)):
                Membership.objects.using(MainRouter.admin_db).create(
                    user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
                )
                role = Role.objects.using(MainRouter.admin_db).create(
                    name="admin",
                    tenant_id=tenant.id,
                    manage_users=True,
                    manage_account=True,
                    manage_billing=True,
                    manage_providers=True,
                    manage_integrations=True,
                    manage_scans=True,
                    unlimited_visibility=True,
                )
                UserRoleRelationship.objects.using(MainRouter.admin_db).create(
                    user=user,
                    role=role,
                    tenant_id=tenant.id,
                )
            return user
```
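For context, django-allauth picks up a custom adapter through its `SOCIALACCOUNT_ADAPTER` setting; a minimal sketch of the wiring (the exact settings module where this lives is an assumption, since it is not part of this file):

```python
# Hypothetical settings snippet: tell django-allauth to use the adapter above.
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
```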
**api/src/backend/api/admin.py** (new file, 3 lines)
```python
# from django.contrib import admin

# Register your models here.
```
**api/src/backend/api/apps.py** (new file, 12 lines)
```python
from django.apps import AppConfig


class ApiConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "api"

    def ready(self):
        from api import signals  # noqa: F401
        from api.compliance import load_prowler_compliance

        load_prowler_compliance()
```
**api/src/backend/api/base_views.py** (new file, 152 lines)
```python
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from rest_framework import permissions
from rest_framework.exceptions import NotAuthenticated
from rest_framework.filters import SearchFilter
from rest_framework_json_api import filters
from rest_framework_json_api.views import ModelViewSet
from rest_framework_simplejwt.authentication import JWTAuthentication

from api.db_router import MainRouter
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
from api.filters import CustomDjangoFilterBackend
from api.models import Role, Tenant
from api.rbac.permissions import HasPermissions


class BaseViewSet(ModelViewSet):
    authentication_classes = [JWTAuthentication]
    required_permissions = []
    permission_classes = [permissions.IsAuthenticated, HasPermissions]
    filter_backends = [
        filters.QueryParameterValidationFilter,
        filters.OrderingFilter,
        CustomDjangoFilterBackend,
        SearchFilter,
    ]

    filterset_fields = []
    search_fields = []

    ordering_fields = "__all__"
    ordering = ["id"]

    def initial(self, request, *args, **kwargs):
        """
        Sets required_permissions before permissions are checked.
        """
        self.set_required_permissions()
        super().initial(request, *args, **kwargs)

    def set_required_permissions(self):
        """This is an abstract method that must be implemented by subclasses."""
        NotImplemented

    def get_queryset(self):
        raise NotImplementedError


class BaseRLSViewSet(BaseViewSet):
    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            return super().dispatch(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        # Ideally, this logic would be in the `.setup()` method but DRF view sets don't call it
        # https://docs.djangoproject.com/en/5.1/ref/class-based-views/base/#django.views.generic.base.View.setup
        if request.auth is None:
            raise NotAuthenticated

        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context["tenant_id"] = self.request.tenant_id
        return context


class BaseTenantViewset(BaseViewSet):
    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            tenant = super().dispatch(request, *args, **kwargs)

            try:
                # If the request is a POST, create the admin role
                if request.method == "POST":
                    isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
            except Exception as e:
                self._handle_creation_error(e, tenant)
                raise

            return tenant

    def _create_admin_role(self, tenant_id):
        Role.objects.using(MainRouter.admin_db).create(
            name="admin",
            tenant_id=tenant_id,
            manage_users=True,
            manage_account=True,
            manage_billing=True,
            manage_providers=True,
            manage_integrations=True,
            manage_scans=True,
            unlimited_visibility=True,
        )

    def _handle_creation_error(self, error, tenant):
        if tenant.data.get("id"):
            try:
                Tenant.objects.using(MainRouter.admin_db).filter(
                    id=tenant.data["id"]
                ).delete()
            except ObjectDoesNotExist:
                pass  # Tenant might not exist, handle gracefully

    def initial(self, request, *args, **kwargs):
        if (
            request.resolver_match.url_name != "tenant-detail"
            and request.method != "DELETE"
        ):
            user_id = str(request.user.id)

            with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
                return super().initial(request, *args, **kwargs)

        # TODO: DRY this when we have time
        if request.auth is None:
            raise NotAuthenticated

        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)


class BaseUserViewset(BaseViewSet):
    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            return super().dispatch(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        # TODO refactor after improving RLS on users
        if request.stream is not None and request.stream.method == "POST":
            return super().initial(request, *args, **kwargs)
        if request.auth is None:
            raise NotAuthenticated

        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)
```
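As a hedged usage sketch, a tenant-scoped endpoint would subclass `BaseRLSViewSet` and only needs to supply a queryset, serializer and permissions; the `Finding` model and `FindingSerializer` imports below are assumptions for illustration, not names confirmed by this diff:

```python
# Hypothetical example of building on BaseRLSViewSet: row-level security plus
# the tenant id from the JWT scope every query to the caller's tenant.
from api.base_views import BaseRLSViewSet
from api.models import Finding  # assumed model name
from api.serializers import FindingSerializer  # assumed serializer name


class FindingViewSet(BaseRLSViewSet):
    serializer_class = FindingSerializer

    def set_required_permissions(self):
        # No extra RBAC permissions beyond authentication for this sketch.
        self.required_permissions = []

    def get_queryset(self):
        # rls_transaction already restricts rows to request.tenant_id,
        # so a plain queryset is tenant-scoped.
        return Finding.objects.all()
```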
**api/src/backend/api/compliance.py** (new file, 235 lines)
```python
from types import MappingProxyType

from api.models import Provider
from prowler.config.config import get_available_compliance_frameworks
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata

PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
PROWLER_CHECKS = {}
AVAILABLE_COMPLIANCE_FRAMEWORKS = {}


def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
    """
    Retrieve and cache the list of available compliance frameworks for a specific cloud provider.

    This function lazily loads and caches the available compliance frameworks (e.g., CIS, MITRE, ISO)
    for each provider type (AWS, Azure, GCP, etc.) on first access. Subsequent calls for the same
    provider will return the cached result.

    Args:
        provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
            available compliance frameworks (e.g., "aws", "azure", "gcp", "m365").

    Returns:
        list[str]: A list of framework identifiers (e.g., "cis_1.4_aws", "mitre_attack_azure") available
            for the given provider.
    """
    global AVAILABLE_COMPLIANCE_FRAMEWORKS
    if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
        AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = (
            get_available_compliance_frameworks(provider_type)
        )

    return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]


def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
    """
    Retrieve all check IDs for the specified provider type.

    This function fetches the check metadata for the given cloud provider
    and returns an iterable of check IDs.

    Args:
        provider_type (Provider.ProviderChoices): The provider type
            (e.g., 'aws', 'azure') for which to retrieve check IDs.

    Returns:
        Iterable[str]: An iterable of check IDs associated with the specified provider type.
    """
    return CheckMetadata.get_bulk(provider_type).keys()


def get_prowler_provider_compliance(provider_type: Provider.ProviderChoices) -> dict:
    """
    Retrieve the Prowler compliance data for a specified provider type.

    This function fetches the compliance frameworks and their associated
    requirements for the given cloud provider.

    Args:
        provider_type (Provider.ProviderChoices): The provider type
            (e.g., 'aws', 'azure') for which to retrieve compliance data.

    Returns:
        dict: A dictionary mapping compliance framework names to their respective
            Compliance objects for the specified provider.
    """
    return Compliance.get_bulk(provider_type)


def load_prowler_compliance():
    """
    Load and initialize the Prowler compliance data and checks for all provider types.

    This function retrieves compliance data for all supported provider types,
    generates a compliance overview template, and populates the global variables
    `PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE` and `PROWLER_CHECKS` with read-only mappings
    of the compliance templates and checks, respectively.
    """
    global PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
    global PROWLER_CHECKS

    prowler_compliance = {
        provider_type: get_prowler_provider_compliance(provider_type)
        for provider_type in Provider.ProviderChoices.values
    }
    template = generate_compliance_overview_template(prowler_compliance)
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = MappingProxyType(template)
    PROWLER_CHECKS = MappingProxyType(load_prowler_checks(prowler_compliance))


def load_prowler_checks(prowler_compliance):
    """
    Generate a mapping of checks to the compliance frameworks that include them.

    This function processes the provided compliance data and creates a dictionary
    mapping each provider type to a dictionary where each check ID maps to a set
    of compliance names that include that check.

    Args:
        prowler_compliance (dict): The compliance data for all provider types,
            as returned by `get_prowler_provider_compliance`.

    Returns:
        dict: A nested dictionary where the first-level keys are provider types,
            and the values are dictionaries mapping check IDs to sets of compliance names.
    """
    checks = {}
    for provider_type in Provider.ProviderChoices.values:
        checks[provider_type] = {
            check_id: set() for check_id in get_prowler_provider_checks(provider_type)
        }
        for compliance_name, compliance_data in prowler_compliance[
            provider_type
        ].items():
            for requirement in compliance_data.Requirements:
                for check in requirement.Checks:
                    try:
                        checks[provider_type][check].add(compliance_name)
                    except KeyError:
                        continue
    return checks


def generate_scan_compliance(
    compliance_overview, provider_type: str, check_id: str, status: str
):
    """
    Update the compliance overview with the status of a specific check.

    This function updates the compliance overview by setting the status of the given check
    within all compliance frameworks and requirements that include it. It then updates the
    requirement status to 'FAIL' if any of its checks have failed, and adjusts the counts
    of passed and failed requirements in the compliance overview.

    Args:
        compliance_overview (dict): The compliance overview data structure to update.
        provider_type (str): The provider type (e.g., 'aws', 'azure') associated with the check.
        check_id (str): The identifier of the check whose status is being updated.
        status (str): The status of the check (e.g., 'PASS', 'FAIL', 'MUTED').

    Returns:
        None: This function modifies the compliance_overview in place.
    """
    for compliance_id in PROWLER_CHECKS[provider_type][check_id]:
        for requirement in compliance_overview[compliance_id]["requirements"].values():
```
|
||||
if check_id in requirement["checks"]:
|
||||
requirement["checks"][check_id] = status
|
||||
requirement["checks_status"][status.lower()] += 1
|
||||
|
||||
if requirement["status"] != "FAIL" and any(
|
||||
value == "FAIL" for value in requirement["checks"].values()
|
||||
):
|
||||
requirement["status"] = "FAIL"
|
||||
compliance_overview[compliance_id]["requirements_status"]["passed"] -= 1
|
||||
compliance_overview[compliance_id]["requirements_status"]["failed"] += 1
|
||||
|
||||
|
||||
def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
"""
|
||||
Generate a compliance overview template for all provider types.
|
||||
|
||||
This function creates a nested dictionary structure representing the compliance
|
||||
overview template for each provider type, compliance framework, and requirement.
|
||||
It initializes the status of all checks and requirements, and calculates initial
|
||||
counts for requirements status.
|
||||
|
||||
Args:
|
||||
prowler_compliance (dict): The compliance data for all provider types,
|
||||
as returned by `get_prowler_provider_compliance`.
|
||||
|
||||
Returns:
|
||||
dict: A nested dictionary representing the compliance overview template,
|
||||
structured by provider type and compliance framework.
|
||||
"""
|
||||
template = {}
|
||||
for provider_type in Provider.ProviderChoices.values:
|
||||
provider_compliance = template.setdefault(provider_type, {})
|
||||
compliance_data_dict = prowler_compliance[provider_type]
|
||||
|
||||
for compliance_name, compliance_data in compliance_data_dict.items():
|
||||
compliance_requirements = {}
|
||||
requirements_status = {"passed": 0, "failed": 0, "manual": 0}
|
||||
total_requirements = 0
|
||||
|
||||
for requirement in compliance_data.Requirements:
|
||||
total_requirements += 1
|
||||
total_checks = len(requirement.Checks)
|
||||
checks_dict = {check: None for check in requirement.Checks}
|
||||
|
||||
# Build requirement dictionary
|
||||
requirement_dict = {
|
||||
"name": requirement.Name or requirement.Id,
|
||||
"description": requirement.Description,
|
||||
"attributes": [
|
||||
dict(attribute) for attribute in requirement.Attributes
|
||||
],
|
||||
"checks": checks_dict,
|
||||
"checks_status": {
|
||||
"pass": 0,
|
||||
"fail": 0,
|
||||
"manual": 0,
|
||||
"total": total_checks,
|
||||
},
|
||||
"status": "PASS",
|
||||
}
|
||||
|
||||
# Update requirements status
|
||||
if total_checks == 0:
|
||||
requirements_status["manual"] += 1
|
||||
|
||||
# Add requirement to compliance requirements
|
||||
compliance_requirements[requirement.Id] = requirement_dict
|
||||
|
||||
# Calculate pending requirements
|
||||
pending_requirements = total_requirements - requirements_status["manual"]
|
||||
requirements_status["passed"] = pending_requirements
|
||||
|
||||
# Build compliance dictionary
|
||||
compliance_dict = {
|
||||
"framework": compliance_data.Framework,
|
||||
"version": compliance_data.Version,
|
||||
"provider": provider_type,
|
||||
"description": compliance_data.Description,
|
||||
"requirements": compliance_requirements,
|
||||
"requirements_status": requirements_status,
|
||||
"total_requirements": total_requirements,
|
||||
}
|
||||
|
||||
# Add compliance to provider compliance
|
||||
provider_compliance[compliance_name] = compliance_dict
|
||||
|
||||
return template
|
||||
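In practice these helpers feed each other: `load_prowler_compliance()` builds the immutable templates once at startup, and each scan copies the template and folds its findings in through `generate_scan_compliance`. A minimal sketch of that flow (the check IDs and findings below are hypothetical):

    from copy import deepcopy

    from api.compliance import (
        PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
        generate_scan_compliance,
        load_prowler_compliance,
    )

    load_prowler_compliance()  # populate the read-only global templates once

    # The global template is wrapped in MappingProxyType, so work on a copy per scan
    compliance_overview = deepcopy(PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE["aws"])

    # Hypothetical findings: (check_id, status) pairs from a finished scan
    for check_id, status in [("iam_check_example", "FAIL"), ("s3_check_example", "PASS")]:
        generate_scan_compliance(compliance_overview, "aws", check_id, status)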
api/src/backend/api/db_router.py (new file, 29 lines)
@@ -0,0 +1,29 @@
ALLOWED_APPS = ("django", "socialaccount", "account", "authtoken", "silk")


class MainRouter:
    default_db = "default"
    admin_db = "admin"

    def db_for_read(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if model_table_name.startswith("django_") or any(
            model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS
        ):
            return self.admin_db
        return None

    def db_for_write(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if any(model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS):
            return self.admin_db
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):  # noqa: F841
        return db == self.admin_db

    def allow_relation(self, obj1, obj2, **hints):  # noqa: F841
        # Allow relations if both objects are in either "default" or "admin" db connectors
        if {obj1._state.db, obj2._state.db} <= {self.default_db, self.admin_db}:
            return True
        return None
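The net effect is that Django's own tables (plus the allauth, token, and silk apps) read and write through the privileged `admin` connection, migrations only ever run there, and tenant data stays on the RLS-restricted `default` connection. A sketch of the settings wiring this assumes (the connection details are placeholders; only the alias names mirror `MainRouter`):

    DATABASES = {
        "default": {  # RLS-restricted application user
            "ENGINE": "django.db.backends.postgresql",
            "NAME": "prowler_db",
            "USER": "prowler_user",
        },
        "admin": {  # privileged user for migrations and Django-internal tables
            "ENGINE": "django.db.backends.postgresql",
            "NAME": "prowler_db",
            "USER": "prowler_admin",
        },
    }
    DATABASE_ROUTERS = ["api.db_router.MainRouter"]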
api/src/backend/api/db_utils.py (new file, 347 lines)
@@ -0,0 +1,347 @@
import secrets
import uuid
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone

from django.conf import settings
from django.contrib.auth.models import BaseUserManager
from django.db import connection, models, transaction
from django_celery_beat.models import PeriodicTask
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
from rest_framework_json_api.serializers import ValidationError

DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
DB_PASSWORD = (
    settings.DATABASES["default"]["PASSWORD"] if not settings.TESTING else "test"
)
DB_PROWLER_USER = (
    settings.DATABASES["prowler_user"]["USER"] if not settings.TESTING else "test"
)
DB_PROWLER_PASSWORD = (
    settings.DATABASES["prowler_user"]["PASSWORD"] if not settings.TESTING else "test"
)
TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
POSTGRES_TENANT_VAR = "api.tenant_id"
POSTGRES_USER_VAR = "api.user_id"

SET_CONFIG_QUERY = "SELECT set_config(%s, %s::text, TRUE);"


@contextmanager
def psycopg_connection(database_alias: str):
    psycopg2_connection = None
    try:
        admin_db = settings.DATABASES[database_alias]

        psycopg2_connection = psycopg2_connect(
            dbname=admin_db["NAME"],
            user=admin_db["USER"],
            password=admin_db["PASSWORD"],
            host=admin_db["HOST"],
            port=admin_db["PORT"],
        )
        yield psycopg2_connection
    finally:
        if psycopg2_connection is not None:
            psycopg2_connection.close()


@contextmanager
def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
    """
    Creates a new database transaction that sets the given configuration value for Postgres RLS.
    It also validates that the value is a valid UUID.

    Args:
        value (str): Database configuration parameter value.
        parameter (str): Database configuration parameter name; by default it is 'api.tenant_id'.
    """
    with transaction.atomic():
        with connection.cursor() as cursor:
            try:
                # just in case the value is a UUID object
                uuid.UUID(str(value))
            except ValueError:
                raise ValidationError("Must be a valid UUID")
            cursor.execute(SET_CONFIG_QUERY, [parameter, value])
            yield cursor
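Because the `set_config` call passes `TRUE` for `is_local`, the tenant variable only lives for the duration of the enclosing transaction, so callers wrap each tenant-scoped query in the context manager. A minimal sketch (the `Provider` query is illustrative):

    from api.db_utils import rls_transaction
    from api.models import Provider

    def count_tenant_providers(tenant_id: str) -> int:
        with rls_transaction(tenant_id):
            # RLS policies read api.tenant_id via current_setting() and
            # restrict every row this query can see to that tenant.
            return Provider.objects.count()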


class CustomUserManager(BaseUserManager):
    def create_user(self, email, password=None, **extra_fields):
        if not email:
            raise ValueError("The email field must be set")
        email = self.normalize_email(email)
        user = self.model(email=email, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def get_by_natural_key(self, email):
        return self.get(email__iexact=email)


def enum_to_choices(enum_class):
    """
    This function converts a Python Enum to a list of tuples, where the first element is the value and the second element is the name.

    It's for use with Django's `choices` attribute, which expects a list of tuples.
    """
    return [(item.value, item.name.replace("_", " ").title()) for item in enum_class]


def one_week_from_now():
    """
    Return a datetime object with a date one week from now.
    """
    return datetime.now(timezone.utc) + timedelta(days=7)


def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
    """
    Generate a random token with the specified length.
    """
    _symbols = "23456789ABCDEFGHJKMNPQRSTVWXYZ"
    return "".join(secrets.choice(symbols or _symbols) for _ in range(length))


def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_SIZE):
    """
    Deletes objects in batches and returns the total number of deletions and a summary.

    Args:
        tenant_id (str): Tenant ID the queryset belongs to.
        queryset (QuerySet): The queryset of objects to delete.
        batch_size (int): The number of objects to delete in each batch.

    Returns:
        tuple: (total_deleted, deletion_summary)
    """
    total_deleted = 0
    deletion_summary = {}

    while True:
        with rls_transaction(tenant_id, POSTGRES_TENANT_VAR):
            # Get a batch of IDs to delete
            batch_ids = set(
                queryset.values_list("id", flat=True).order_by("id")[:batch_size]
            )
            if not batch_ids:
                # No more objects to delete
                break

            deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()

            total_deleted += deleted_count
            for model_label, count in deleted_info.items():
                deletion_summary[model_label] = deletion_summary.get(model_label, 0) + count

    return total_deleted, deletion_summary
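Each loop iteration opens its own RLS transaction, so a long purge never holds one giant transaction open. A usage sketch (the scan UUID is illustrative):

    from api.db_utils import batch_delete
    from api.models import Finding

    total, summary = batch_delete(
        tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5",
        queryset=Finding.objects.filter(scan_id="1b59e032-3eb6-4694-93a5-df84cd9b3ce2"),
    )
    # summary maps model labels to per-model counts, e.g. {"api.Finding": 1200}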


def delete_related_daily_task(provider_id: str):
    """
    Deletes the periodic task associated with a specific provider.

    Args:
        provider_id (str): The unique identifier for the provider
            whose related periodic task should be deleted.
    """
    task_name = f"scan-perform-scheduled-{provider_id}"
    PeriodicTask.objects.filter(name=task_name).delete()


# Postgres Enums


class PostgresEnumMigration:
    def __init__(self, enum_name: str, enum_values: tuple):
        self.enum_name = enum_name
        self.enum_values = enum_values

    def create_enum_type(self, apps, schema_editor):  # noqa: F841
        string_enum_values = ", ".join([f"'{value}'" for value in self.enum_values])
        with schema_editor.connection.cursor() as cursor:
            cursor.execute(
                f"CREATE TYPE {self.enum_name} AS ENUM ({string_enum_values});"
            )

    def drop_enum_type(self, apps, schema_editor):  # noqa: F841
        with schema_editor.connection.cursor() as cursor:
            cursor.execute(f"DROP TYPE {self.enum_name};")


class PostgresEnumField(models.Field):
    def __init__(self, enum_type_name, *args, **kwargs):
        self.enum_type_name = enum_type_name
        super().__init__(*args, **kwargs)

    def db_type(self, connection):
        return self.enum_type_name

    def from_db_value(self, value, expression, connection):  # noqa: F841
        return value

    def to_python(self, value):
        if isinstance(value, EnumType):
            return value.value
        return value

    def get_prep_value(self, value):
        if isinstance(value, EnumType):
            return value.value
        return value


class EnumType:
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return self.value


def enum_adapter(enum_obj):
    return AsIs(f"'{enum_obj.value}'::{enum_obj.__class__.enum_type_name}")


def get_enum_oid(connection, enum_type_name: str):
    with connection.cursor() as cursor:
        cursor.execute("SELECT oid FROM pg_type WHERE typname = %s;", (enum_type_name,))
        result = cursor.fetchone()
        if result is None:
            raise ValueError(f"Enum type '{enum_type_name}' not found")
        return result[0]


def register_enum(apps, schema_editor, enum_class):  # noqa: F841
    with psycopg_connection(schema_editor.connection.alias) as connection:
        enum_oid = get_enum_oid(connection, enum_class.enum_type_name)
        enum_instance = new_type(
            (enum_oid,),
            enum_class.enum_type_name,
            lambda value, cur: value,  # noqa: F841
        )
        register_type(enum_instance, connection)
        register_adapter(enum_class, enum_adapter)


# Postgres enum definition for member role


class MemberRoleEnum(EnumType):
    enum_type_name = "member_role"


class MemberRoleEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("member_role", *args, **kwargs)


# Postgres enum definition for Provider.provider


class ProviderEnum(EnumType):
    enum_type_name = "provider"


class ProviderEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("provider", *args, **kwargs)


# Postgres enum definition for Scan.type


class ScanTriggerEnum(EnumType):
    enum_type_name = "scan_trigger"


class ScanTriggerEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("scan_trigger", *args, **kwargs)


# Postgres enum definition for state


class StateEnum(EnumType):
    enum_type_name = "state"


class StateEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("state", *args, **kwargs)


# Postgres enum definition for Finding.Delta


class FindingDeltaEnum(EnumType):
    enum_type_name = "finding_delta"


class FindingDeltaEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("finding_delta", *args, **kwargs)


# Postgres enum definition for Severity


class SeverityEnum(EnumType):
    enum_type_name = "severity"


class SeverityEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("severity", *args, **kwargs)


# Postgres enum definition for Status


class StatusEnum(EnumType):
    enum_type_name = "status"


class StatusEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("status", *args, **kwargs)


# Postgres enum definition for Provider secrets type


class ProviderSecretTypeEnum(EnumType):
    enum_type_name = "provider_secret_type"


class ProviderSecretTypeEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("provider_secret_type", *args, **kwargs)


# Postgres enum definition for Invitation state


class InvitationStateEnum(EnumType):
    enum_type_name = "invitation_state"


class InvitationStateEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("invitation_state", *args, **kwargs)


# Postgres enum definition for Integration type


class IntegrationTypeEnum(EnumType):
    enum_type_name = "integration_type"


class IntegrationTypeEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("integration_type", *args, **kwargs)
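These helpers come together inside Django migrations: `PostgresEnumMigration` creates and drops the Postgres type, and `register_enum` teaches psycopg2 how to read and adapt it. A sketch of what such a migration could look like (the migration names and enum values are illustrative, not taken from the diff):

    from functools import partial

    from django.db import migrations

    from api.db_utils import PostgresEnumMigration, StateEnum, register_enum

    StateEnumMigration = PostgresEnumMigration(
        enum_name="state",
        enum_values=("available", "scheduled", "executing", "completed", "failed"),
    )

    class Migration(migrations.Migration):
        dependencies = [("api", "0001_initial")]  # hypothetical

        operations = [
            # Create the Postgres type before any column uses it
            migrations.RunPython(
                StateEnumMigration.create_enum_type,
                reverse_code=StateEnumMigration.drop_enum_type,
            ),
            # Teach psycopg2 to adapt StateEnum instances to the new type
            migrations.RunPython(partial(register_enum, enum_class=StateEnum)),
        ]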
api/src/backend/api/decorators.py (new file, 68 lines)
@@ -0,0 +1,68 @@
import uuid
from functools import wraps

from django.db import connection, transaction
from rest_framework_json_api.serializers import ValidationError

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY


def set_tenant(func=None, *, keep_tenant=False):
    """
    Decorator to set the tenant context for a Celery task based on the provided tenant_id.

    This decorator extracts the `tenant_id` from the task's keyword arguments
    and uses it to set the tenant context for the current database session.
    Unless `keep_tenant` is True, the `tenant_id` is removed from the kwargs
    before the task function is executed. If `tenant_id` is not provided,
    a KeyError is raised.

    Args:
        func (function): The Celery task function to be decorated.
        keep_tenant (bool): If True, leave `tenant_id` in the task's kwargs
            after setting the tenant context; by default it is popped.

    Raises:
        KeyError: If `tenant_id` is not found in the task's keyword arguments.

    Returns:
        function: The wrapped function with tenant context set.

    Example:
        # This decorator MUST be the last one in the decorator chain

        @shared_task
        @set_tenant
        def some_task(arg1, **kwargs):
            # Task logic here
            pass

        # When calling the task
        some_task.delay(arg1, tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")

        # The tenant context will be set before the task logic executes.
    """

    def decorator(func):
        @wraps(func)
        @transaction.atomic
        def wrapper(*args, **kwargs):
            try:
                if not keep_tenant:
                    tenant_id = kwargs.pop("tenant_id")
                else:
                    tenant_id = kwargs["tenant_id"]
            except KeyError:
                raise KeyError("This task requires the tenant_id")
            try:
                uuid.UUID(tenant_id)
            except ValueError:
                raise ValidationError("Tenant ID must be a valid UUID")
            with connection.cursor() as cursor:
                cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])

            return func(*args, **kwargs)

        return wrapper

    if func is None:
        return decorator
    else:
        return decorator(func)
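The docstring example covers the default behaviour; with `keep_tenant=True` the decorator still sets the session variable but leaves `tenant_id` in the kwargs for the task body to use. A short sketch (the task itself is hypothetical):

    from celery import shared_task

    from api.db_utils import batch_delete
    from api.decorators import set_tenant
    from api.models import Finding

    @shared_task
    @set_tenant(keep_tenant=True)
    def purge_scan_findings(scan_id, **kwargs):
        # tenant_id stays in kwargs because keep_tenant=True
        batch_delete(kwargs["tenant_id"], Finding.objects.filter(scan_id=scan_id))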
api/src/backend/api/exceptions.py (new file, 45 lines)
@@ -0,0 +1,45 @@
from django.core.exceptions import ValidationError as django_validation_error
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken


class ModelValidationError(ValidationError):
    def __init__(
        self,
        detail: str | None = None,
        code: str | None = None,
        pointer: str | None = None,
        status_code: int = 400,
    ):
        super().__init__(
            detail=[
                {
                    "detail": detail,
                    "status": str(status_code),
                    "source": {"pointer": pointer},
                    "code": code,
                }
            ]
        )


class InvitationTokenExpiredException(APIException):
    status_code = status.HTTP_410_GONE
    default_detail = "The invitation token has expired and is no longer valid."
    default_code = "token_expired"


def custom_exception_handler(exc, context):
    if isinstance(exc, django_validation_error):
        if hasattr(exc, "error_dict"):
            exc = ValidationError(exc.message_dict)
        else:
            exc = ValidationError(detail=exc.messages[0], code=exc.code)
    elif isinstance(exc, (TokenError, InvalidToken)):
        exc.detail["messages"] = [
            message_item["message"] for message_item in exc.detail["messages"]
        ]
    return exception_handler(exc, context)
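For the handler to run, DRF has to point at it, and model or serializer code raises `ModelValidationError` to emit a fully formed JSON:API error object. A minimal sketch of both sides (the settings fragment and the example error values are assumptions):

    # settings.py
    REST_FRAMEWORK = {
        "EXCEPTION_HANDLER": "api.exceptions.custom_exception_handler",
    }

    # somewhere in a model's clean() or a serializer
    raise ModelValidationError(
        detail="A provider with this UID already exists.",
        code="unique",
        pointer="/data/attributes/uid",
    )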
api/src/backend/api/filters.py (new file, 662 lines)
@@ -0,0 +1,662 @@
from datetime import date, datetime, timedelta, timezone

from django.conf import settings
from django.db.models import Q
from django_filters.rest_framework import (
    BaseInFilter,
    BooleanFilter,
    CharFilter,
    ChoiceFilter,
    DateFilter,
    FilterSet,
    UUIDFilter,
)
from rest_framework_json_api.django_filters.backends import DjangoFilterBackend
from rest_framework_json_api.serializers import ValidationError

from api.db_utils import (
    FindingDeltaEnumField,
    InvitationStateEnumField,
    ProviderEnumField,
    SeverityEnumField,
    StatusEnumField,
)
from api.models import (
    ComplianceOverview,
    Finding,
    Integration,
    Invitation,
    Membership,
    PermissionChoices,
    Provider,
    ProviderGroup,
    ProviderSecret,
    Resource,
    ResourceTag,
    Role,
    Scan,
    ScanSummary,
    SeverityChoices,
    StateChoices,
    StatusChoices,
    Task,
    User,
)
from api.rls import Tenant
from api.uuid_utils import (
    datetime_to_uuid7,
    transform_into_uuid7,
    uuid7_end,
    uuid7_range,
    uuid7_start,
)
from api.v1.serializers import TaskBase


class CustomDjangoFilterBackend(DjangoFilterBackend):
    def to_html(self, _request, _queryset, _view):
        """Override this method to use the Browsable API in dev environments.

        This disables the HTML render for the default filter.
        """
        return None

    def get_filterset_class(self, view, queryset=None):
        # Check if the view has a 'get_filterset_class' method
        if hasattr(view, "get_filterset_class"):
            return view.get_filterset_class()
        # Fall back to the default implementation
        return super().get_filterset_class(view, queryset)
class UUIDInFilter(BaseInFilter, UUIDFilter):
    pass


class CharInFilter(BaseInFilter, CharFilter):
    pass


class ChoiceInFilter(BaseInFilter, ChoiceFilter):
    pass


class TenantFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Tenant
        fields = {
            "name": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class MembershipFilter(FilterSet):
    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")
    role = ChoiceFilter(choices=Membership.RoleChoices.choices)

    class Meta:
        model = Membership
        fields = {
            "tenant": ["exact"],
            "role": ["exact"],
            "date_joined": ["date", "gte", "lte"],
        }


class ProviderFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    connected = BooleanFilter()
    provider = ChoiceFilter(choices=Provider.ProviderChoices.choices)

    class Meta:
        model = Provider
        fields = {
            "provider": ["exact", "in"],
            "id": ["exact", "in"],
            "uid": ["exact", "icontains", "in"],
            "alias": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        filter_overrides = {
            ProviderEnumField: {
                "filter_class": CharFilter,
            },
        }


class ProviderRelationshipFilterSet(FilterSet):
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_uid = CharFilter(field_name="provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(field_name="provider__alias", lookup_expr="in")
    provider_alias__icontains = CharFilter(
        field_name="provider__alias", lookup_expr="icontains"
    )


class ProviderGroupFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = ProviderGroup
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class ScanFilter(ProviderRelationshipFilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    completed_at = DateFilter(field_name="completed_at", lookup_expr="date")
    started_at = DateFilter(field_name="started_at", lookup_expr="date")
    next_scan_at = DateFilter(field_name="next_scan_at", lookup_expr="date")
    trigger = ChoiceFilter(choices=Scan.TriggerChoices.choices)
    state = ChoiceFilter(choices=StateChoices.choices)
    state__in = ChoiceInFilter(
        field_name="state", choices=StateChoices.choices, lookup_expr="in"
    )

    class Meta:
        model = Scan
        fields = {
            "provider": ["exact", "in"],
            "name": ["exact", "icontains"],
            "started_at": ["gte", "lte"],
            "next_scan_at": ["gte", "lte"],
            "trigger": ["exact"],
        }


class TaskFilter(FilterSet):
    name = CharFilter(field_name="task_runner_task__task_name", lookup_expr="exact")
    name__icontains = CharFilter(
        field_name="task_runner_task__task_name", lookup_expr="icontains"
    )
    state = ChoiceFilter(
        choices=StateChoices.choices, method="filter_state", lookup_expr="exact"
    )
    task_state_inverse_mapping_values = {
        v: k for k, v in TaskBase.state_mapping.items()
    }

    def filter_state(self, queryset, name, value):
        if value not in StateChoices:
            raise ValidationError(
                f"Invalid state value: '{value}'. Valid values are: "
                f"{', '.join(StateChoices)}"
            )

        return queryset.filter(
            task_runner_task__status=self.task_state_inverse_mapping_values[value]
        )

    class Meta:
        model = Task
        fields = []


class ResourceTagFilter(FilterSet):
    class Meta:
        model = ResourceTag
        fields = {
            "key": ["exact", "icontains"],
            "value": ["exact", "icontains"],
        }
        search = ["text_search"]


class ResourceFilter(ProviderRelationshipFilterSet):
    tag_key = CharFilter(method="filter_tag_key")
    tag_value = CharFilter(method="filter_tag_value")
    tag = CharFilter(method="filter_tag")
    tags = CharFilter(method="filter_tag")
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Resource
        fields = {
            "provider": ["exact", "in"],
            "uid": ["exact", "icontains"],
            "name": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
            "service": ["exact", "icontains", "in"],
            "type": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }

    def filter_tag_key(self, queryset, name, value):
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))

    def filter_tag_value(self, queryset, name, value):
        return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))

    def filter_tag(self, queryset, name, value):
        # We won't know what the user wants to filter on just based on the value,
        # and we don't want to build special filtering logic for every possible
        # provider tag spec, so we'll just do a full text search
        return queryset.filter(tags__text_search=value)
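On the wire these become JSON:API style query parameters; the generic `filter[tag]` falls back to the tags' full-text search column. Illustrative requests (the paths assume the API's v1 prefix):

    GET /api/v1/resources?filter[tag_key]=env
    GET /api/v1/resources?filter[tag]=production
    GET /api/v1/resources?filter[region__in]=us-east-1,eu-west-1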


class FindingFilter(FilterSet):
    # We filter providers from the scan in findings
    provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="scan__provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(
        field_name="scan__provider__alias", lookup_expr="in"
    )
    provider_alias__icontains = CharFilter(
        field_name="scan__provider__alias", lookup_expr="icontains"
    )

    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    uid = CharFilter(field_name="uid")
    delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
    status = ChoiceFilter(choices=StatusChoices.choices)
    severity = ChoiceFilter(choices=SeverityChoices)
    impact = ChoiceFilter(choices=SeverityChoices)
    muted = BooleanFilter(
        help_text="If this filter is not provided, muted and non-muted findings will be returned."
    )

    resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")

    region = CharFilter(field_name="resources__region")
    region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
    region__icontains = CharFilter(
        field_name="resources__region", lookup_expr="icontains"
    )

    service = CharFilter(field_name="resources__service")
    service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
    service__icontains = CharFilter(
        field_name="resources__service", lookup_expr="icontains"
    )

    resource_uid = CharFilter(field_name="resources__uid")
    resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
    resource_uid__icontains = CharFilter(
        field_name="resources__uid", lookup_expr="icontains"
    )

    resource_name = CharFilter(field_name="resources__name")
    resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
    resource_name__icontains = CharFilter(
        field_name="resources__name", lookup_expr="icontains"
    )

    resource_type = CharFilter(field_name="resources__type")
    resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
    resource_type__icontains = CharFilter(
        field_name="resources__type", lookup_expr="icontains"
    )

    # Temporarily disabled until we implement tag filtering in the UI
    # resource_tag_key = CharFilter(field_name="resources__tags__key")
    # resource_tag_key__in = CharInFilter(
    #     field_name="resources__tags__key", lookup_expr="in"
    # )
    # resource_tag_key__icontains = CharFilter(
    #     field_name="resources__tags__key", lookup_expr="icontains"
    # )
    # resource_tag_value = CharFilter(field_name="resources__tags__value")
    # resource_tag_value__in = CharInFilter(
    #     field_name="resources__tags__value", lookup_expr="in"
    # )
    # resource_tag_value__icontains = CharFilter(
    #     field_name="resources__tags__value", lookup_expr="icontains"
    # )
    # resource_tags = CharInFilter(
    #     method="filter_resource_tag",
    #     lookup_expr="in",
    #     help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
    #     "separated by commas.",
    # )

    scan = UUIDFilter(method="filter_scan_id")
    scan__in = UUIDInFilter(method="filter_scan_id_in")

    inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__gte = DateFilter(
        method="filter_inserted_at_gte",
        help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
    )
    inserted_at__lte = DateFilter(
        method="filter_inserted_at_lte",
        help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
    )

    class Meta:
        model = Finding
        fields = {
            "id": ["exact", "in"],
            "uid": ["exact", "in"],
            "scan": ["exact", "in"],
            "delta": ["exact", "in"],
            "status": ["exact", "in"],
            "severity": ["exact", "in"],
            "impact": ["exact", "in"],
            "check_id": ["exact", "in", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        filter_overrides = {
            FindingDeltaEnumField: {
                "filter_class": CharFilter,
            },
            StatusEnumField: {
                "filter_class": CharFilter,
            },
            SeverityEnumField: {
                "filter_class": CharFilter,
            },
        }

    def filter_queryset(self, queryset):
        if not (self.data.get("scan") or self.data.get("scan__in")) and not (
            self.data.get("inserted_at")
            or self.data.get("inserted_at__date")
            or self.data.get("inserted_at__gte")
            or self.data.get("inserted_at__lte")
        ):
            raise ValidationError(
                [
                    {
                        "detail": "At least one date filter is required: filter[inserted_at], filter[inserted_at.gte], "
                        "or filter[inserted_at.lte].",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/inserted_at"},
                        "code": "required",
                    }
                ]
            )

        gte_date = (
            datetime.strptime(self.data.get("inserted_at__gte"), "%Y-%m-%d").date()
            if self.data.get("inserted_at__gte")
            else datetime.now(timezone.utc).date()
        )
        lte_date = (
            datetime.strptime(self.data.get("inserted_at__lte"), "%Y-%m-%d").date()
            if self.data.get("inserted_at__lte")
            else datetime.now(timezone.utc).date()
        )

        if abs(lte_date - gte_date) > timedelta(
            days=settings.FINDINGS_MAX_DAYS_IN_RANGE
        ):
            raise ValidationError(
                [
                    {
                        "detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
                        "status": 400,
                        "source": {"pointer": "/data/attributes/inserted_at"},
                        "code": "invalid",
                    }
                ]
            )

        return super().filter_queryset(queryset)

    # Convert filter values to UUIDv7 values for use with partitioning
    def filter_scan_id(self, queryset, name, value):
        try:
            value_uuid = transform_into_uuid7(value)
            start = uuid7_start(value_uuid)
            end = uuid7_end(value_uuid, settings.FINDINGS_TABLE_PARTITION_MONTHS)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )

        return (
            queryset.filter(id__gte=start).filter(id__lt=end).filter(scan_id=value_uuid)
        )

    def filter_scan_id_in(self, queryset, name, value):
        try:
            uuid_list = [
                transform_into_uuid7(value_uuid)
                for value_uuid in value
                if value_uuid is not None
            ]

            start, end = uuid7_range(uuid_list)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )
        if start == end:
            return queryset.filter(id__gte=start).filter(scan_id__in=uuid_list)
        else:
            return (
                queryset.filter(id__gte=start)
                .filter(id__lt=end)
                .filter(scan_id__in=uuid_list)
            )

    def filter_inserted_at(self, queryset, name, value):
        datetime_value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(datetime_value))
        end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))

        return queryset.filter(id__gte=start, id__lt=end)

    def filter_inserted_at_gte(self, queryset, name, value):
        datetime_value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(datetime_value))

        return queryset.filter(id__gte=start)

    def filter_inserted_at_lte(self, queryset, name, value):
        datetime_value = self.maybe_date_to_datetime(value)
        end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))

        return queryset.filter(id__lt=end)

    def filter_resource_tag(self, queryset, name, value):
        overall_query = Q()
        for key_value_pair in value:
            tag_key, tag_value = key_value_pair.split(":", 1)
            overall_query |= Q(
                resources__tags__key__icontains=tag_key,
                resources__tags__value__icontains=tag_value,
            )
        return queryset.filter(overall_query).distinct()

    @staticmethod
    def maybe_date_to_datetime(value):
        dt = value
        if isinstance(value, date):
            dt = datetime.combine(value, datetime.min.time(), tzinfo=timezone.utc)
        return dt
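The id-range pre-filters above are what make these queries cheap: findings use UUIDv7 primary keys whose leading 48 bits encode a millisecond timestamp, and the table is partitioned on that id, so constraining `id` to the window derived from a scan UUID or a date lets Postgres prune whole partitions instead of scanning them all. A rough sketch of the idea (the real helpers live in `api.uuid_utils` and their internals may differ):

    import uuid
    from datetime import datetime, timezone

    def approx_uuid7_start(dt: datetime) -> uuid.UUID:
        # UUIDv7 keeps the Unix timestamp in milliseconds in the top 48 bits,
        # so the smallest possible id for a given instant is that timestamp << 80.
        return uuid.UUID(int=int(dt.timestamp() * 1000) << 80)

    start = approx_uuid7_start(datetime(2024, 10, 18, tzinfo=timezone.utc))
    # Finding.objects.filter(id__gte=start) only touches partitions that can
    # contain rows inserted on or after that day.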


class ProviderSecretFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    provider = UUIDFilter(field_name="provider__id", lookup_expr="exact")

    class Meta:
        model = ProviderSecret
        fields = {
            "name": ["exact", "icontains"],
        }


class InvitationFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    expires_at = DateFilter(field_name="expires_at", lookup_expr="date")
    state = ChoiceFilter(choices=Invitation.State.choices)
    state__in = ChoiceInFilter(choices=Invitation.State.choices, lookup_expr="in")

    class Meta:
        model = Invitation
        fields = {
            "email": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["date", "gte", "lte"],
            "expires_at": ["date", "gte", "lte"],
            "inviter": ["exact"],
        }
        filter_overrides = {
            InvitationStateEnumField: {
                "filter_class": CharFilter,
            }
        }


class UserFilter(FilterSet):
    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")

    class Meta:
        model = User
        fields = {
            "name": ["exact", "icontains"],
            "email": ["exact", "icontains"],
            "company_name": ["exact", "icontains"],
            "date_joined": ["date", "gte", "lte"],
            "is_active": ["exact"],
        }


class RoleFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    permission_state = ChoiceFilter(
        choices=PermissionChoices.choices, method="filter_permission_state"
    )

    def filter_permission_state(self, queryset, name, value):
        return Role.filter_by_permission_state(queryset, value)

    class Meta:
        model = Role
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class ComplianceOverviewFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
    provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
    scan_id = UUIDFilter(field_name="scan__id")

    class Meta:
        model = ComplianceOverview
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "compliance_id": ["exact", "icontains"],
            "framework": ["exact", "iexact", "icontains"],
            "version": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
        }


class ScanSummaryFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider_type = ChoiceFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    provider_type__in = ChoiceInFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    region = CharFilter(field_name="region")

    class Meta:
        model = ScanSummary
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "region": ["exact", "icontains", "in"],
        }


class ServiceOverviewFilter(ScanSummaryFilter):
    def is_valid(self):
        # Check if at least one of the inserted_at filters is present
        inserted_at_filters = [
            self.data.get("inserted_at"),
            self.data.get("inserted_at__gte"),
            self.data.get("inserted_at__lte"),
        ]
        if not any(inserted_at_filters):
            raise ValidationError(
                {
                    "inserted_at": [
                        "At least one of filter[inserted_at], filter[inserted_at__gte], or "
                        "filter[inserted_at__lte] is required."
                    ]
                }
            )
        return super().is_valid()


class IntegrationFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
    integration_type__in = ChoiceInFilter(
        choices=Integration.IntegrationChoices.choices,
        field_name="integration_type",
        lookup_expr="in",
    )

    class Meta:
        model = Integration
        fields = {
            "inserted_at": ["date", "gte", "lte"],
        }
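All of these FilterSets surface as `filter[...]` query parameters through the JSON:API filter backend. A few illustrative requests (the endpoint paths and the invitation state value are assumptions):

    GET /api/v1/scans?filter[state]=completed&filter[provider_type__in]=aws,gcp
    GET /api/v1/findings?filter[inserted_at__gte]=2024-10-01&filter[inserted_at__lte]=2024-10-07&filter[severity__in]=high,critical
    GET /api/v1/invitations?filter[state]=pending&filter[email__icontains]=@prowler.com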
api/src/backend/api/fixtures/dev/0_dev_users.json (new file, 28 lines)
@@ -0,0 +1,28 @@
[
    {
        "model": "api.user",
        "pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
        "fields": {
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devie Prowlerson",
            "email": "dev@prowler.com",
            "company_name": "Prowler Developers",
            "is_active": true,
            "date_joined": "2024-09-17T09:04:20.850Z"
        }
    },
    {
        "model": "api.user",
        "pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
        "fields": {
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devietoo Prowlerson",
            "email": "dev2@prowler.com",
            "company_name": "Prowler Developers",
            "is_active": true,
            "date_joined": "2024-09-18T09:04:20.850Z"
        }
    }
]
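These fixtures seed a development environment; with a standard Django setup they load via `loaddata` (the exact invocation and paths may differ):

    python manage.py loaddata api/fixtures/dev/0_dev_users.json api/fixtures/dev/1_dev_tenants.json api/fixtures/dev/2_dev_providers.json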
api/src/backend/api/fixtures/dev/1_dev_tenants.json (new file, 50 lines)
@@ -0,0 +1,50 @@
[
    {
        "model": "api.tenant",
        "pk": "12646005-9067-4d2a-a098-8bb378604362",
        "fields": {
            "inserted_at": "2024-03-21T23:00:00Z",
            "updated_at": "2024-03-21T23:00:00Z",
            "name": "Tenant1"
        }
    },
    {
        "model": "api.tenant",
        "pk": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
        "fields": {
            "inserted_at": "2024-03-21T23:00:00Z",
            "updated_at": "2024-03-21T23:00:00Z",
            "name": "Tenant2"
        }
    },
    {
        "model": "api.membership",
        "pk": "2b0db93a-7e0b-4edf-a851-ea448676b7eb",
        "fields": {
            "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
            "tenant": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
            "role": "owner",
            "date_joined": "2024-09-19T11:03:59.712Z"
        }
    },
    {
        "model": "api.membership",
        "pk": "797d7cee-abc9-4598-98bb-4bf4bfb97f27",
        "fields": {
            "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "role": "owner",
            "date_joined": "2024-09-19T11:02:59.712Z"
        }
    },
    {
        "model": "api.membership",
        "pk": "dea37563-7009-4dcf-9f18-25efb41462a7",
        "fields": {
            "user": "b6493a3a-c997-489b-8b99-278bf74de9f6",
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "role": "member",
            "date_joined": "2024-09-19T11:03:59.712Z"
        }
    }
]
api/src/backend/api/fixtures/dev/2_dev_providers.json (new file, 193 lines)
@@ -0,0 +1,193 @@
[
    {
        "model": "api.provider",
        "pk": "37b065f8-26b0-4218-a665-0b23d07b27d9",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:20:27.050Z",
            "updated_at": "2024-08-01T17:20:27.050Z",
            "provider": "gcp",
            "uid": "a12322-test321",
            "alias": "gcp_testing_2",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "8851db6b-42e5-4533-aa9e-30a32d67e875",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:19:42.453Z",
            "updated_at": "2024-08-01T17:19:42.453Z",
            "provider": "gcp",
            "uid": "a12345-test123",
            "alias": "gcp_testing_1",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "b85601a8-4b45-4194-8135-03fb980ef428",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:19:09.556Z",
            "updated_at": "2024-08-01T17:19:09.556Z",
            "provider": "aws",
            "uid": "123456789020",
            "alias": "aws_testing_2",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "baa7b895-8bac-4f47-b010-4226d132856e",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:20:16.962Z",
            "updated_at": "2024-08-01T17:20:16.962Z",
            "provider": "gcp",
            "uid": "a12322-test123",
            "alias": "gcp_testing_3",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "d7c7ea89-d9af-423b-a364-1290dcad5a01",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:18:58.132Z",
            "updated_at": "2024-08-01T17:18:58.132Z",
            "provider": "aws",
            "uid": "123456789015",
            "alias": "aws_testing_1",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-06T16:03:26.176Z",
            "updated_at": "2024-08-06T16:03:26.176Z",
            "provider": "azure",
            "uid": "8851db6b-42e5-4533-aa9e-30a32d67e875",
            "alias": "azure_testing",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "26e55a24-cb2c-4cef-ac87-6f91fddb2c97",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-06T16:03:07.037Z",
            "updated_at": "2024-08-06T16:03:07.037Z",
            "provider": "kubernetes",
            "uid": "kubernetes-test-12345",
            "alias": "k8s_testing",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:45:26.352Z",
            "updated_at": "2024-10-18T11:16:23.533Z",
            "provider": "aws",
            "uid": "106908755759",
            "alias": "real testing aws provider",
            "connected": true,
            "connection_last_checked_at": "2024-10-18T11:16:23.503Z",
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:45:26.352Z",
            "updated_at": "2024-10-18T11:16:23.533Z",
            "provider": "kubernetes",
            "uid": "gke_lucky-coast-419309_us-central1_autopilot-cluster-2",
            "alias": "k8s_testing_2",
            "connected": true,
            "connection_last_checked_at": "2024-10-18T11:16:23.503Z",
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.providersecret",
        "pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-11T08:03:05.026Z",
            "updated_at": "2024-10-11T08:04:47.033Z",
            "name": "GCP static secrets",
            "secret_type": "static",
"_secret": "Z0FBQUFBQm5DTndmZW9KakRZUHM2UHhQN2V3RzN0QmM1cERham8yMHp5cnVTT0lzdGFyS1FuVmJXUlpYSGsyU0cxR3RMMEdQYXlYMUVsaWtqLU1OZWlaVUp6OFREYlotZTVBY3BuTlZYbm9YcUJydzAxV2p5dkpLamI1Y2tUYzA0MmJUNWxsNTBRM0E1SDRCa0pPQWVlb05YU3dfeUhkLTRmOEh3dGczOGh1ZGhQcVdZdVAtYmtoSWlwNXM4VGFoVmF3dno2X1hrbk5GZjZTWjVuWEdEZUFXeHJSQjEzbTlVakhNdzYyWTdiVEpvUEc2MTNpRzUtczhEank1eGI0b3MyMlAyaGN6dlByZmtUWHByaDNUYWFqYS1tYnNBUkRKTzBacFNSRjFuVmd5bUtFUEJhd1ZVS1ZDd2xSUV9PaEtLTnc0XzVkY2lhM01WTjQwaWdJSk9wNUJSXzQ4RUNQLXFPNy1VdzdPYkZyWkVkU3RyQjVLTS1MVHN0R3k4THNKZ2NBNExaZnl3Q1EwN2dwNGRsUXptMjB0LXUzTUpzTDE2Q1hmS0ZSN2g1ZjBPeV8taFoxNUwxc2FEcktXX0dCM1IzeUZTTHNiTmNxVXBvNWViZTJScUVWV2VYTFQ4UHlid21PY1A0UjdNMGtERkZCd0lLMlJENDMzMVZUM09DQ0twd1N3VHlZd09XLUctOWhYcFJIR1p5aUlZeEUzejc2dWRYdGNsd0xOODNqRUFEczhSTWNtWU0tdFZ1ZTExaHNHUVYtd0Zxdld1LTdKVUNINzlZTGdHODhKeVVpQmRZMHRUNTJRRWhwS1F1Y3I2X2Iwc0c1NHlXSVRLZWxreEt0dVRnOTZFMkptU2VMS1dWXzdVOVRzMUNUWXM2aFlxVDJXdGo3d2cxSVZGWlI2ZWhIZzZBcEl4bEJ6UnVHc0RYWVNHcjFZUHI5ZUYyWG9rSlo0QUVSUkFCX3h2UmtJUTFzVXJUZ25vTmk2VzdoTTNta05ucmNfTi0yR1ZxN1E2MnZJOVVKOGxmMXMzdHMxVndmSVhQbUItUHgtMVpVcHJwMU5JVHJLb0Y1aHV5OEEwS0kzQkEtcFJkdkRnWGxmZnprNFhndWg1TmQyd09yTFdTRmZ3d2ZvZFUtWXp4a2VYb3JjckFIcE13MDUzX0RHSnlzM0N2ZE5IRzJzMXFMc0k4MDRyTHdLZFlWOG9SaFF0LU43Ynd6VFlEcVNvdFZ0emJEVk10aEp4dDZFTFNFNzk0UUo2WTlVLWRGYm1fanZHaFZreHBIMmtzVjhyS0xPTk9fWHhiVTJHQXZwVlVuY3JtSjFUYUdHQzhEaHFNZXhwUHBmY0kxaUVrOHo4a0FYOTdpZVJDbFRvdFlQeWo3eFZHX1ZMZ1Myc3prU3o2c3o2eXNja1U4N0Y1T0d1REVjZFRGNTByUkgyemVCSjlQYkY2bmJ4YTZodHB0cUNzd2xZcENycUdsczBIaEZPbG1jVUlqNlM2cEE3aGpVaWswTzBDLVFGUHM5UHhvM09saWNtaDhaNVlsc3FZdktKeWlheDF5OGhTODE2N3JWamdTZG5Fa3JSQ2ZUSEVfRjZOZXdreXRZLTBZRFhleVFFeC1YUzc0cWhYeEhobGxvdnZ3Rm15WFlBWXp0dm1DeTA5eExLeEFRRXVRSXBXdTNEaWdZZ3JDenItdDhoZlFiTzI0SGZ1c01FR1FNaFVweVBKR1YxWGRUMW1Mc2JVdW9raWR6UHk2ZTBnS05pV3oyZVBjREdkY3k4ZHZPUWE5S281MkJRSHF3NnpTclZ5bl90bk1wUEh6Tkp5dXlDcE5paWRqcVhxRFVObWIzRldWOGJ2aC1CRHZpbFZrb0hjNGpCMm5POGRiS2lETUpMLUVfQlhCdTZPLW9USW1LTFlTSF9zRUJYZ1NKeFFEQjNOR215ZXJDbkFndmcxWl9rWlk9",
            "provider": "8851db6b-42e5-4533-aa9e-30a32d67e875"
        }
    },
    {
        "model": "api.providersecret",
        "pk": "40191ad5-d8c2-40a9-826d-241397626b68",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-10T11:11:44.515Z",
            "updated_at": "2024-10-11T07:59:56.102Z",
            "name": "AWS static secrets",
            "secret_type": "static",
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428"
}
},
{
"model": "api.providersecret",
"pk": "ed89d1ea-366a-4d12-a602-f2ab77019742",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-10T11:11:44.515Z",
"updated_at": "2024-10-11T07:59:56.102Z",
"name": "Azure static secrets",
"secret_type": "static",
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2"
}
},
{
"model": "api.providersecret",
"pk": "ae48ecde-75cd-4814-92ab-18f48719e5d9",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:45:26.412Z",
"updated_at": "2024-10-18T10:45:26.412Z",
"name": "Valid AWS Credentials",
"secret_type": "static",
"_secret": "Z0FBQUFBQm5FanhHa3dXS0I3M2NmWm56SktiaGNqdDZUN0xQU1QwUi15QkhLZldFUmRENk1BXzlscG9JSUxVSTF5ekxuMkdEanlJNjhPUS1VSV9wVTBvU2l4ZnNGOVJhYW93RC1LTEhmc2pyOTJvUWwyWnpFY19WN1pRQk5IdDYwYnBDQnF1eU9nUzdwTGU3QU5qMGFyX1E4SXdpSk9paGVLcVpOVUhwb3duaXgxZ0ZxME5Pcm40QzBGWEZKY2lmRVlCMGFuVFVzemxuVjVNalZVQ2JsY2ZqNWt3Z01IYUZ0dk92YkdtSUZ5SlBvQWZoVU5DWlRFWmExNnJGVEY4Q1Bnd2VJUW9TSWdRcG9rSDNfREQwRld3Q1RYVnVYWVJLWWIxZmpsWGpwd0xQM0dtLTlYUjdHOVhhNklLWXFGTHpFQUVyVmNhYW9CU0tocGVyX3VjMkVEcVdjdFBfaVpsLTBzaUxrWTlta3dpelNtTG9xYVhBUHUzNUE4RnI1WXdJdHcxcFVfaG1XRHhDVFBKamxJb1FaQ2lsQ3FzRmxZbEJVemVkT1E2aHZfbDJqWDJPT3ViOWJGYzQ3eTNWNlFQSHBWRDFiV2tneDM4SmVqMU9Bd01TaXhPY2dmWG5RdENURkM2b2s5V3luVUZQcnFKNldnWEdYaWE2MnVNQkEwMHd6cUY5cVJkcGw4bHBtNzhPeHhkREdwSXNEc1JqQkxUR1FYRTV0UFNwbVlVSWF5LWgtbVhJZXlPZ0Q4cG9HX2E0Qld0LTF1TTFEVy1XNGdnQTRpLWpQQmFJUEdaOFJGNDVoUVJnQ25YVU5DTENMaTY4YmxtYWJFRERXTjAydVN2YnBDb3RkUE0zSDRlN1A3TXc4d2h1Wmd0LWUzZEcwMUstNUw2YnFyS2Z0NEVYMXllQW5GLVBpeU55SkNhczFIeFhrWXZpVXdwSFVrTDdiQjQtWHZJdERXVThzSnJsT2FNZzJDaUt6Y2NXYUZhUlo3VkY0R1BrSHNHNHprTmxjYmp1TXVKakRha0VtNmRFZWRmZHJWdnRCOVNjVGFVWjVQM3RwWWl4SkNmOU1pb2xqMFdOblhNY3Y3aERpOHFlWjJRc2dtRDkzZm1Qc29wdk5OQmJPbGk5ZUpGM1I2YzRJN2gxR3FEMllXR1pma1k0emVqSjZyMUliMGZsc3NfSlVDbGt4QzJTc3hHOU9FRHlZb09zVnlvcDR6WC1uclRSenI0Yy13WlFWNzJWRkwydjhmSjFZdnZ5X3NmZVF6UWRNMXo5STVyV3B0d09UUlFtOURITGhXSDVIUl9zYURJc05KWUNxekVyYkxJclNFNV9leEk4R2xsMGJod3lYeFIwaXR2dllwLTZyNWlXdDRpRkxVYkxWZFdvYUhKck5aeElBZUtKejNKS2tYVW1rTnVrRjJBQmdlZmV6ckozNjNwRmxLS1FaZzRVTTBZYzFFYi1idjBpZkQ3bWVvbEdRZXJrWFNleWZmSmFNdG1wQlp0YmxjWDV5T0tEbHRsYnNHbjRPRjl5MkttOUhRWlJtd1pmTnY4Z1lPRlZoTzFGVDdTZ0RDY1ByV0RndTd5LUNhcHNXUnNIeXdLMEw3WS1tektRTWFLQy1zakpMLWFiM3FOakE1UWU4LXlOX2VPbmd4MTZCRk9OY3Z4UGVDSWxhRlg4eHI4X1VUTDZZM0pjV0JDVi1UUjlTUl85cm1LWlZ0T1dzU0lpdWUwbXgtZ0l6eHNSNExRTV9MczJ6UkRkVElnRV9Rc0RoTDFnVHRZSEFPb2paX200TzZiRzVmRE5hOW5CTjh5Qi1WaEtueEpqRzJDY1luVWZtX1pseUpQSE5lQ0RrZ05EbWo5cU9MZ0ZkcXlqUll4UUkyejRfY2p4RXdEeC1PS1JIQVNUcmNIdkRJbzRiUktMWEQxUFM3aGNzeVFWUDdtcm5xNHlOYUU9",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555"
}
}
]
256
api/src/backend/api/fixtures/dev/3_dev_scans.json
Normal file
@@ -0,0 +1,256 @@
[
{
"model": "api.scan",
"pk": "0191e280-9d2f-71c8-9b18-487a23ba185e",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "37b065f8-26b0-4218-a665-0b23d07b27d9",
"trigger": "manual",
"name": "test scan 1",
"state": "completed",
"unique_resource_count": 1,
"duration": 5,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
},
"inserted_at": "2024-09-01T17:25:27.050Z",
"started_at": "2024-09-01T17:25:27.050Z",
"updated_at": "2024-09-01T17:25:27.050Z",
"completed_at": "2024-09-01T17:25:32.050Z"
}
},
{
"model": "api.scan",
"pk": "01920573-aa9c-73c9-bcda-f2e35c9b19d2",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "manual",
"name": "test aws scan 2",
"state": "completed",
"unique_resource_count": 1,
"duration": 20,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
},
"inserted_at": "2024-09-02T17:24:27.050Z",
"started_at": "2024-09-02T17:24:27.050Z",
"updated_at": "2024-09-02T17:24:27.050Z",
"completed_at": "2024-09-01T17:24:37.050Z"
}
},
{
"model": "api.scan",
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
"trigger": "manual",
"name": "test gcp scan",
"state": "completed",
"unique_resource_count": 10,
"duration": 10,
"scanner_args": {
"checks_to_execute": ["cloudsql_instance_automated_backups"]
},
"inserted_at": "2024-09-02T19:26:27.050Z",
"started_at": "2024-09-02T19:26:27.050Z",
"updated_at": "2024-09-02T19:26:27.050Z",
"completed_at": "2024-09-01T17:26:37.050Z"
}
},
{
"model": "api.scan",
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "manual",
"name": "test aws scan",
"state": "completed",
"unique_resource_count": 1,
"duration": 35,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
},
"inserted_at": "2024-09-02T19:27:27.050Z",
"started_at": "2024-09-02T19:27:27.050Z",
"updated_at": "2024-09-02T19:27:27.050Z",
"completed_at": "2024-09-01T17:27:37.050Z"
}
},
{
"model": "api.scan",
"pk": "c281c924-23f3-4fcc-ac63-73a22154b7de",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "scheduled",
"name": "test scheduled aws scan",
"state": "available",
"scanner_args": {
"checks_to_execute": ["cloudformation_stack_outputs_find_secrets"]
},
"scheduled_at": "2030-09-02T19:20:27.050Z",
"inserted_at": "2024-09-02T19:24:27.050Z",
"updated_at": "2024-09-02T19:24:27.050Z"
}
},
{
"model": "api.scan",
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
"trigger": "scheduled",
"name": "test scheduled aws scan 2",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"cloudformation_stack_outputs_find_secrets"
]
},
"scheduled_at": "2030-08-02T19:31:27.050Z",
"inserted_at": "2024-09-02T19:38:27.050Z",
"updated_at": "2024-09-02T19:38:27.050Z"
}
},
{
"model": "api.scan",
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
"trigger": "scheduled",
"name": "test scheduled gcp scan",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"cloudsql_instance_automated_backups",
"iam_audit_logs_enabled"
]
},
"scheduled_at": "2030-07-02T19:30:27.050Z",
"inserted_at": "2024-09-02T19:29:27.050Z",
"updated_at": "2024-09-02T19:29:27.050Z"
}
},
{
"model": "api.scan",
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"trigger": "scheduled",
"name": "test scheduled azure scan",
"state": "available",
"scanner_args": {
"checks_to_execute": [
"aks_cluster_rbac_enabled",
"defender_additional_email_configured_with_a_security_contact"
]
},
"scheduled_at": "2030-08-05T19:32:27.050Z",
"inserted_at": "2024-09-02T19:29:27.050Z",
"updated_at": "2024-09-02T19:29:27.050Z"
}
},
{
"model": "api.scan",
"pk": "01929f3b-ed2e-7623-ad63-7c37cd37828f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan 1",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 19,
"progress": 100,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
},
"duration": 7,
"scheduled_at": null,
"inserted_at": "2024-10-18T10:45:57.678Z",
"updated_at": "2024-10-18T10:46:05.127Z",
"started_at": "2024-10-18T10:45:57.909Z",
"completed_at": "2024-10-18T10:46:05.127Z"
}
},
{
"model": "api.scan",
"pk": "6dd8925f-a52d-48de-a546-d2d90db30ab1",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan azure",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
},
{
"model": "api.scan",
"pk": "4ca7ce89-3236-41a8-a369-8937bc152af5",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan k8s",
"provider": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
},
{
"model": "api.scan",
"pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan 2",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
}
]
322
api/src/backend/api/fixtures/dev/4_dev_resources.json
Normal file
@@ -0,0 +1,322 @@
[
{
"model": "api.resource",
"pk": "0234477d-0b8e-439f-87d3-ce38dff3a434",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.772Z",
"updated_at": "2024-10-18T11:16:24.466Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root",
"name": "",
"region": "eu-south-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-south':6C 'iam':3A 'other':11 'root':5A 'south':8C"
}
},
{
"model": "api.resource",
"pk": "17ce30a3-6e77-42a5-bb08-29dfcad7396a",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.882Z",
"updated_at": "2024-10-18T11:16:24.533Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root2",
"name": "",
"region": "eu-west-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
}
},
{
"model": "api.resource",
"pk": "1f9de587-ba5b-415a-b9b0-ceed4c6c9f32",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.091Z",
"updated_at": "2024-10-18T11:16:24.637Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root3",
"name": "",
"region": "ap-northeast-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "29b35668-6dad-411d-bfec-492311889892",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.008Z",
"updated_at": "2024-10-18T11:16:24.600Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root4",
"name": "",
"region": "us-west-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
}
},
{
"model": "api.resource",
"pk": "30505514-01d4-42bb-8b0c-471bbab27460",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:26.014Z",
"updated_at": "2024-10-18T11:16:26.023Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root5",
"name": "",
"region": "us-east-1",
"service": "account",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'account':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
}
},
{
"model": "api.resource",
"pk": "372932f0-e4df-4968-9721-bb4f6236fae4",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.848Z",
"updated_at": "2024-10-18T11:16:24.516Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root6",
"name": "",
"region": "eu-west-3",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
}
},
{
"model": "api.resource",
"pk": "3a37d124-7637-43f6-9df7-e9aa7ef98c53",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.979Z",
"updated_at": "2024-10-18T11:16:24.585Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root7",
"name": "",
"region": "sa-east-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'sa':7C 'sa-east':6C"
}
},
{
"model": "api.resource",
"pk": "3c49318e-03c6-4f12-876f-40451ce7de3d",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.072Z",
"updated_at": "2024-10-18T11:16:24.630Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root8",
"name": "",
"region": "ap-southeast-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
}
},
{
"model": "api.resource",
"pk": "430bf313-8733-4bc5-ac70-5402adfce880",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.994Z",
"updated_at": "2024-10-18T11:16:24.593Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root9",
"name": "",
"region": "eu-north-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-north':6C 'iam':3A 'north':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "78bd2a52-82f9-45df-90a9-4ad78254fdc4",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.055Z",
"updated_at": "2024-10-18T11:16:24.622Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root10",
"name": "",
"region": "ap-northeast-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "7973e332-795e-4a74-b4d4-a53a21c98c80",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.896Z",
"updated_at": "2024-10-18T11:16:24.542Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root11",
"name": "",
"region": "us-east-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
}
},
{
"model": "api.resource",
"pk": "8ca0a188-5699-436e-80fd-e566edaeb259",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.938Z",
"updated_at": "2024-10-18T11:16:24.565Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root12",
"name": "",
"region": "ca-central-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'ca':7C 'ca-central':6C 'central':8C 'iam':3A 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "8fe4514f-71d7-46ab-b0dc-70cef23b4d13",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.965Z",
"updated_at": "2024-10-18T11:16:24.578Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root13",
"name": "",
"region": "eu-west-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
}
},
{
"model": "api.resource",
"pk": "9ab35225-dc7c-4ebd-bbc0-d81fb5d9de77",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.909Z",
"updated_at": "2024-10-18T11:16:24.549Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root14",
"name": "",
"region": "ap-south-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-south':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'south':8C"
}
},
{
"model": "api.resource",
"pk": "9be26c1d-adf0-4ba8-9ca9-c740f4a0dc4e",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.863Z",
"updated_at": "2024-10-18T11:16:24.524Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root15",
"name": "",
"region": "eu-central-2",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "ba108c01-bcad-44f1-b211-c1d8985da89d",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.110Z",
"updated_at": "2024-10-18T11:16:24.644Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root16",
"name": "",
"region": "ap-northeast-3",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "dc6cfb5d-6835-4c7b-9152-c18c734a6eaa",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.038Z",
"updated_at": "2024-10-18T11:16:24.615Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root17",
"name": "",
"region": "eu-central-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
}
},
{
"model": "api.resource",
"pk": "e0664164-cfda-44a4-b743-acee1c69386c",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.924Z",
"updated_at": "2024-10-18T11:16:24.557Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root18",
"name": "",
"region": "us-west-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
}
},
{
"model": "api.resource",
"pk": "e1929daa-a984-4116-8131-492a48321dba",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.023Z",
"updated_at": "2024-10-18T11:16:24.607Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:iam::112233445566:root19",
"name": "",
"region": "ap-southeast-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
}
},
{
"model": "api.resource",
"pk": "e37bb1f1-1669-4bb3-be86-e3378ddfbcba",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.952Z",
"updated_at": "2024-10-18T11:16:24.571Z",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"uid": "arn:aws:access-analyzer:us-east-1:112233445566:analyzer/ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
"name": "",
"region": "us-east-1",
"service": "accessanalyzer",
"type": "Other",
"text_search": "'1':9A,15C '112233445566':10A 'access':4A 'access-analyzer':3A 'accessanalyzer':16 'analyzer':5A 'analyzer/consoleanalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c':11A 'arn':1A 'aws':2A 'east':8A,14C 'other':17 'us':7A,13C 'us-east':6A,12C"
}
}
]
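The text_search values above are serialized PostgreSQL tsvector data (lexeme:position pairs with A/C weights), so the column supports full-text lookups. A minimal sketch of querying it from Django, assuming the Resource model exposes the column as a SearchVectorField (an assumption; only the serialized values are visible in this diff):

# Hypothetical full-text lookup against the text_search column shown above.
from django.contrib.postgres.search import SearchQuery

from api.models import Resource  # model named in these fixtures

# Matches resources whose tsvector contains the lexeme 'accessanalyzer'.
matches = Resource.objects.filter(text_search=SearchQuery("accessanalyzer"))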
2537
api/src/backend/api/fixtures/dev/5_dev_findings.json
Normal file
File diff suppressed because it is too large
153
api/src/backend/api/fixtures/dev/6_dev_rbac.json
Normal file
@@ -0,0 +1,153 @@
[
{
"model": "api.providergroup",
"pk": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"fields": {
"name": "first_group",
"inserted_at": "2024-11-13T11:36:19.503Z",
"updated_at": "2024-11-13T11:36:19.503Z",
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
}
},
{
"model": "api.providergroup",
"pk": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"fields": {
"name": "second_group",
"inserted_at": "2024-11-13T11:36:25.421Z",
"updated_at": "2024-11-13T11:36:25.421Z",
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
}
},
{
"model": "api.providergroup",
"pk": "481769f5-db2b-447b-8b00-1dee18db90ec",
"fields": {
"name": "third_group",
"inserted_at": "2024-11-13T11:36:37.603Z",
"updated_at": "2024-11-13T11:36:37.603Z",
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
}
},
{
"model": "api.providergroupmembership",
"pk": "13625bd3-f428-4021-ac1b-b0bd41b6e02f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-13T11:55:17.138Z"
}
},
{
"model": "api.providergroupmembership",
"pk": "54784ebe-42d2-4937-aa6a-e21c62879567",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-13T11:55:17.138Z"
}
},
{
"model": "api.providergroupmembership",
"pk": "c8bd52d5-42a5-48fe-8e0a-3eef154b8ebe",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-13T11:55:41.237Z"
}
},
{
"model": "api.role",
"pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "admin_test",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": true,
"manage_scans": true,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:32:42.402Z",
"updated_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.role",
"pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "first_role",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": false,
"manage_scans": false,
"unlimited_visibility": true,
"inserted_at": "2024-11-20T15:31:53.239Z",
"updated_at": "2024-11-20T15:31:53.239Z"
}
},
{
"model": "api.role",
"pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "third_role",
"manage_users": false,
"manage_account": false,
"manage_billing": false,
"manage_providers": false,
"manage_integrations": false,
"manage_scans": true,
"unlimited_visibility": false,
"inserted_at": "2024-11-20T15:34:05.440Z",
"updated_at": "2024-11-20T15:34:05.440Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.roleprovidergrouprelationship",
"pk": "cfd84182-a058-40c2-af3c-0189b174940f",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
"inserted_at": "2024-11-20T15:32:42.402Z"
}
},
{
"model": "api.userrolerelationship",
"pk": "92339663-e954-4fd8-98fb-8bfe15949975",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"inserted_at": "2024-11-20T15:36:14.302Z"
}
}
]
1
api/src/backend/api/fixtures/dev/7_dev_compliance.json
Normal file
File diff suppressed because one or more lines are too long
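Taken together, the numbered fixture files in this changeset seed a development database. A minimal sketch of loading them in order, assuming a configured Django environment for the backend (the settings module and working directory are assumptions):

# Minimal sketch: load the dev fixtures in their numbered order.
# Assumes DJANGO_SETTINGS_MODULE already points at the backend settings.
import django
django.setup()

from django.core.management import call_command

# Paths as they appear in this diff; loaddata also accepts bare fixture names.
for fixture in [
    "api/src/backend/api/fixtures/dev/3_dev_scans.json",
    "api/src/backend/api/fixtures/dev/4_dev_resources.json",
    "api/src/backend/api/fixtures/dev/5_dev_findings.json",
    "api/src/backend/api/fixtures/dev/6_dev_rbac.json",
    "api/src/backend/api/fixtures/dev/7_dev_compliance.json",
]:
    call_command("loaddata", fixture)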
237
api/src/backend/api/management/commands/findings.py
Normal file
@@ -0,0 +1,237 @@
import random
from datetime import datetime, timezone
from math import ceil
from uuid import uuid4

from django.core.management.base import BaseCommand
from tqdm import tqdm

from api.db_utils import rls_transaction
from api.models import (
    Finding,
    Provider,
    Resource,
    ResourceFindingMapping,
    Scan,
    StatusChoices,
)
from prowler.lib.check.models import CheckMetadata


class Command(BaseCommand):
    help = "Populates the database with test data for performance testing."

    def add_arguments(self, parser):
        parser.add_argument(
            "--tenant",
            type=str,
            required=True,
            help="Tenant id for which the data will be populated.",
        )
        parser.add_argument(
            "--resources",
            type=int,
            required=True,
            help="The number of resources to create.",
        )
        parser.add_argument(
            "--findings",
            type=int,
            required=True,
            help="The number of findings to create.",
        )
        parser.add_argument(
            "--batch", type=int, required=True, help="The batch size for bulk creation."
        )
        parser.add_argument(
            "--alias",
            type=str,
            required=False,
            help="Optional alias for the provider and scan",
        )

    def handle(self, *args, **options):
        tenant_id = options["tenant"]
        num_resources = options["resources"]
        num_findings = options["findings"]
        batch_size = options["batch"]
        alias = options["alias"] or "Testing"
        uid_token = str(uuid4())

        self.stdout.write(self.style.NOTICE("Starting data population"))
        self.stdout.write(self.style.NOTICE(f"\tTenant: {tenant_id}"))
        self.stdout.write(self.style.NOTICE(f"\tAlias: {alias}"))
        self.stdout.write(self.style.NOTICE(f"\tResources: {num_resources}"))
        self.stdout.write(self.style.NOTICE(f"\tFindings: {num_findings}"))
        self.stdout.write(self.style.NOTICE(f"\tBatch size: {batch_size}\n\n"))

        # Resource metadata
        possible_regions = [
            "us-east-1",
            "us-east-2",
            "us-west-1",
            "us-west-2",
            "ca-central-1",
            "eu-central-1",
            "eu-west-1",
            "eu-west-2",
            "eu-west-3",
            "ap-southeast-1",
            "ap-southeast-2",
            "ap-northeast-1",
            "ap-northeast-2",
            "ap-south-1",
            "sa-east-1",
        ]
        possible_services = []
        possible_types = []

        bulk_check_metadata = CheckMetadata.get_bulk(provider="aws")
        for check_metadata in bulk_check_metadata.values():
            if check_metadata.ServiceName not in possible_services:
                possible_services.append(check_metadata.ServiceName)
            if (
                check_metadata.ResourceType
                and check_metadata.ResourceType not in possible_types
            ):
                possible_types.append(check_metadata.ResourceType)

        with rls_transaction(tenant_id):
            provider, _ = Provider.all_objects.get_or_create(
                tenant_id=tenant_id,
                provider="aws",
                connected=True,
                uid=str(random.randint(100000000000, 999999999999)),
                defaults={
                    "alias": alias,
                },
            )

        with rls_transaction(tenant_id):
            scan = Scan.all_objects.create(
                tenant_id=tenant_id,
                provider=provider,
                name=alias,
                trigger="manual",
                state="executing",
                progress=0,
                started_at=datetime.now(timezone.utc),
            )
        scan_state = "completed"

        try:
            # Create resources
            resources = []

            for i in range(num_resources):
                resources.append(
                    Resource(
                        tenant_id=tenant_id,
                        provider_id=provider.id,
                        uid=f"testing-{uid_token}-{i}",
                        name=f"Testing {uid_token}-{i}",
                        region=random.choice(possible_regions),
                        service=random.choice(possible_services),
                        type=random.choice(possible_types),
                    )
                )

            num_batches = ceil(len(resources) / batch_size)
            self.stdout.write(self.style.WARNING("Creating resources..."))
            for i in tqdm(range(0, len(resources), batch_size), total=num_batches):
                with rls_transaction(tenant_id):
                    Resource.all_objects.bulk_create(resources[i : i + batch_size])
            self.stdout.write(self.style.SUCCESS("Resources created successfully.\n\n"))

            with rls_transaction(tenant_id):
                scan.progress = 33
                scan.save()

            # Create Findings
            findings = []
            possible_deltas = ["new", "changed", None]
            possible_severities = ["critical", "high", "medium", "low"]
            findings_resources_mapping = []

            for i in range(num_findings):
                severity = random.choice(possible_severities)
                check_id = random.randint(1, 1000)
                assigned_resource_num = random.randint(0, len(resources) - 1)
                assigned_resource = resources[assigned_resource_num]
                findings_resources_mapping.append(assigned_resource_num)

                findings.append(
                    Finding(
                        tenant_id=tenant_id,
                        scan=scan,
                        uid=f"testing-{uid_token}-{i}",
                        delta=random.choice(possible_deltas),
                        check_id=f"check-{check_id}",
                        status=random.choice(list(StatusChoices)),
                        severity=severity,
                        impact=severity,
                        raw_result={},
                        check_metadata={
                            "checktitle": f"Test title for check {check_id}",
                            "risk": f"Testing risk {uid_token}-{i}",
                            "provider": "aws",
                            "severity": severity,
                            "categories": ["category1", "category2", "category3"],
                            "description": "This is a random description that should not matter for testing purposes.",
                            "servicename": assigned_resource.service,
                            "resourcetype": assigned_resource.type,
                        },
                    )
                )

            num_batches = ceil(len(findings) / batch_size)
            self.stdout.write(self.style.WARNING("Creating findings..."))
            for i in tqdm(range(0, len(findings), batch_size), total=num_batches):
                with rls_transaction(tenant_id):
                    Finding.all_objects.bulk_create(findings[i : i + batch_size])
            self.stdout.write(self.style.SUCCESS("Findings created successfully.\n\n"))

            with rls_transaction(tenant_id):
                scan.progress = 66
                scan.save()

            # Create ResourceFindingMapping
            mappings = []
            for index, f in enumerate(findings):
                mappings.append(
                    ResourceFindingMapping(
                        tenant_id=tenant_id,
                        resource=resources[findings_resources_mapping[index]],
                        finding=f,
                    )
                )

            num_batches = ceil(len(mappings) / batch_size)
            self.stdout.write(
                self.style.WARNING("Creating resource-finding mappings...")
            )
            for i in tqdm(range(0, len(mappings), batch_size), total=num_batches):
                with rls_transaction(tenant_id):
                    ResourceFindingMapping.objects.bulk_create(
                        mappings[i : i + batch_size]
                    )
            self.stdout.write(
                self.style.SUCCESS(
                    "Resource-finding mappings created successfully.\n\n"
                )
            )
        except Exception as e:
            self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
            scan_state = "failed"
        finally:
            scan.completed_at = datetime.now(timezone.utc)
            scan.duration = int(
                (datetime.now(timezone.utc) - scan.started_at).total_seconds()
            )
            scan.progress = 100
            scan.state = scan_state
            scan.unique_resource_count = num_resources
            with rls_transaction(tenant_id):
                scan.save()

        self.stdout.write(self.style.NOTICE("Successfully populated test data."))
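Django names a management command after its module file, so the command above registers as "findings". A minimal usage sketch; the tenant UUID and the sizing values are placeholders:

# Hypothetical invocation of the command defined above.
# Shell equivalent: python manage.py findings --tenant <uuid> --resources 100 --findings 10000 --batch 500
from django.core.management import call_command

call_command(
    "findings",
    tenant="12646005-9067-4d2a-a098-8bb378604362",  # placeholder tenant id
    resources=100,
    findings=10000,
    batch=500,
    alias="perf-test",  # optional; defaults to "Testing" in handle()
)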
49
api/src/backend/api/middleware.py
Normal file
@@ -0,0 +1,49 @@
import logging
import time

from config.custom_logging import BackendLogger


def extract_auth_info(request) -> dict:
    if getattr(request, "auth", None) is not None:
        tenant_id = request.auth.get("tenant_id", "N/A")
        user_id = request.auth.get("sub", "N/A")
    else:
        tenant_id, user_id = "N/A", "N/A"
    return {"tenant_id": tenant_id, "user_id": user_id}


class APILoggingMiddleware:
    """
    Middleware for logging API requests.

    Logs one entry per request with the authenticated tenant and user, the HTTP
    method, path and query parameters, the response status code, and the request
    duration.

    Args:
        get_response (Callable): A callable to get the response, typically the next middleware or view.
    """

    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger(BackendLogger.API)

    def __call__(self, request):
        request_start_time = time.time()

        response = self.get_response(request)
        duration = time.time() - request_start_time
        auth_info = extract_auth_info(request)
        self.logger.info(
            "",
            extra={
                "user_id": auth_info["user_id"],
                "tenant_id": auth_info["tenant_id"],
                "method": request.method,
                "path": request.path,
                "query_params": request.GET.dict(),
                "status_code": response.status_code,
                "duration": duration,
            },
        )

        return response
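For this middleware to run, it must be listed in the Django settings. A minimal sketch of the registration; the other entries and the exact settings module are assumptions, not taken from this diff:

# Sketch of wiring APILoggingMiddleware into Django settings (surrounding entries assumed).
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.middleware.common.CommonMiddleware",
    # ... other project middleware ...
    "api.middleware.APILoggingMiddleware",  # logs after the response is produced
]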
1566
api/src/backend/api/migrations/0001_initial.py
Normal file
File diff suppressed because it is too large
23
api/src/backend/api/migrations/0002_token_migrations.py
Normal file
@@ -0,0 +1,23 @@
from django.conf import settings
from django.db import migrations

from api.db_utils import DB_PROWLER_USER

DB_NAME = settings.DATABASES["default"]["NAME"]


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0001_initial"),
        ("token_blacklist", "0012_alter_outstandingtoken_user"),
    ]

    operations = [
        migrations.RunSQL(
            f"""
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_blacklistedtoken TO {DB_PROWLER_USER};
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_outstandingtoken TO {DB_PROWLER_USER};
            GRANT SELECT, DELETE ON django_admin_log TO {DB_PROWLER_USER};
            """
        ),
    ]
23
api/src/backend/api/migrations/0003_update_provider_unique_constraint_with_is_deleted.py
Normal file
@@ -0,0 +1,23 @@
# Generated by Django 5.1.1 on 2024-12-20 13:16

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0002_token_migrations"),
    ]

    operations = [
        migrations.RemoveConstraint(
            model_name="provider",
            name="unique_provider_uids",
        ),
        migrations.AddConstraint(
            model_name="provider",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "provider", "uid", "is_deleted"),
                name="unique_provider_uids",
            ),
        ),
    ]
248
api/src/backend/api/migrations/0004_rbac.py
Normal file
@@ -0,0 +1,248 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29

import uuid

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0003_update_provider_unique_constraint_with_is_deleted"),
    ]

    operations = [
        migrations.CreateModel(
            name="Role",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("name", models.CharField(max_length=255)),
                ("manage_users", models.BooleanField(default=False)),
                ("manage_account", models.BooleanField(default=False)),
                ("manage_billing", models.BooleanField(default=False)),
                ("manage_providers", models.BooleanField(default=False)),
                ("manage_integrations", models.BooleanField(default=False)),
                ("manage_scans", models.BooleanField(default=False)),
                ("unlimited_visibility", models.BooleanField(default=False)),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "roles",
            },
        ),
        migrations.CreateModel(
            name="RoleProviderGroupRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_provider_group_relationship",
            },
        ),
        migrations.CreateModel(
            name="UserRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_user_relationship",
            },
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="provider_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
            ),
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="provider_groups",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.RoleProviderGroupRelationship",
                to="api.providergroup",
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="users",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.UserRoleRelationship",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "provider_group_id"),
                name="unique_role_provider_group_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_roleprovidergrouprelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "user_id"), name="unique_role_user_relationship"
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_userrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "name"), name="unique_role_per_tenant"
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_role",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.CreateModel(
            name="InvitationRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "invitation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
                    ),
                ),
                (
                    "role",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.role"
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_invitation_relationship",
            },
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "invitation_id"),
                name="unique_role_invitation_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_invitationrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="invitations",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.InvitationRoleRelationship",
                to="api.invitation",
            ),
        ),
    ]
44
api/src/backend/api/migrations/0005_rbac_missing_admin_roles.py
Normal file
@@ -0,0 +1,44 @@
from django.db import migrations

from api.db_router import MainRouter


def create_admin_role(apps, schema_editor):
    Tenant = apps.get_model("api", "Tenant")
    Role = apps.get_model("api", "Role")
    User = apps.get_model("api", "User")
    UserRoleRelationship = apps.get_model("api", "UserRoleRelationship")

    for tenant in Tenant.objects.using(MainRouter.admin_db).all():
        admin_role, _ = Role.objects.using(MainRouter.admin_db).get_or_create(
            name="admin",
            tenant=tenant,
            defaults={
                "manage_users": True,
                "manage_account": True,
                "manage_billing": True,
                "manage_providers": True,
                "manage_integrations": True,
                "manage_scans": True,
                "unlimited_visibility": True,
            },
        )
        users = User.objects.using(MainRouter.admin_db).filter(
            membership__tenant=tenant
        )
        for user in users:
            UserRoleRelationship.objects.using(MainRouter.admin_db).get_or_create(
                user=user,
                role=admin_role,
                tenant=tenant,
            )


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0004_rbac"),
    ]

    operations = [
        migrations.RunPython(create_admin_role),
    ]
15
api/src/backend/api/migrations/0006_findings_first_seen.py
Normal file
@@ -0,0 +1,15 @@
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0005_rbac_missing_admin_roles"),
    ]

    operations = [
        migrations.AddField(
            model_name="finding",
            name="first_seen_at",
            field=models.DateTimeField(editable=False, null=True),
        ),
    ]
25
api/src/backend/api/migrations/0007_scan_and_scan_summaries_indexes.py
Normal file
@@ -0,0 +1,25 @@
# Generated by Django 5.1.5 on 2025-01-28 15:03

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0006_findings_first_seen"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="scan",
            index=models.Index(
                fields=["tenant_id", "provider_id", "state", "inserted_at"],
                name="scans_prov_state_insert_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="scansummary",
            index=models.Index(
                fields=["tenant_id", "scan_id"], name="scan_summaries_tenant_scan_idx"
            ),
        ),
    ]
64
api/src/backend/api/migrations/0008_daily_scheduled_tasks_update.py
Normal file
@@ -0,0 +1,64 @@
import json
from datetime import datetime, timedelta, timezone

import django.db.models.deletion
from django.db import migrations, models
from django_celery_beat.models import PeriodicTask

from api.db_utils import rls_transaction
from api.models import Scan, StateChoices


def migrate_daily_scheduled_scan_tasks(apps, schema_editor):
    for daily_scheduled_scan_task in PeriodicTask.objects.filter(
        task="scan-perform-scheduled"
    ):
        task_kwargs = json.loads(daily_scheduled_scan_task.kwargs)
        tenant_id = task_kwargs["tenant_id"]
        provider_id = task_kwargs["provider_id"]

        current_time = datetime.now(timezone.utc)
        scheduled_time_today = datetime.combine(
            current_time.date(),
            daily_scheduled_scan_task.start_time.time(),
            tzinfo=timezone.utc,
        )

        # If today's slot has not passed yet, schedule for today; otherwise tomorrow.
        if current_time < scheduled_time_today:
            next_scan_date = scheduled_time_today
        else:
            next_scan_date = scheduled_time_today + timedelta(days=1)

        with rls_transaction(tenant_id):
            Scan.objects.create(
                tenant_id=tenant_id,
                name="Daily scheduled scan",
                provider_id=provider_id,
                trigger=Scan.TriggerChoices.SCHEDULED,
                state=StateChoices.SCHEDULED,
                scheduled_at=next_scan_date,
                scheduler_task_id=daily_scheduled_scan_task.id,
            )


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("api", "0007_scan_and_scan_summaries_indexes"),
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        migrations.AddField(
            model_name="scan",
            name="scheduler_task",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="django_celery_beat.periodictask",
            ),
        ),
        migrations.RunPython(migrate_daily_scheduled_scan_tasks),
    ]
@@ -0,0 +1,22 @@
|
||||
# Generated by Django 5.1.5 on 2025-02-07 09:42
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0008_daily_scheduled_tasks_update"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="uid",
|
||||
field=models.CharField(
|
||||
max_length=250,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
verbose_name="Unique identifier for the provider, set by the provider",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,109 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import connection, migrations
|
||||
|
||||
|
||||
def create_index_on_partitions(
|
||||
apps, schema_editor, parent_table: str, index_name: str, index_details: str
|
||||
):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass;
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
# Iterate over partitions and create index concurrently.
|
||||
# Note: PostgreSQL does not allow CONCURRENTLY inside a transaction,
|
||||
# so we need atomic = False for this migration.
|
||||
for partition in partitions:
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} ON {partition} "
|
||||
f"{index_details};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def drop_index_on_partitions(apps, schema_editor, parent_table: str, index_name: str):
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass;
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
# Iterate over partitions and drop index concurrently.
|
||||
for partition in partitions:
|
||||
partition_index = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {partition_index};"
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0009_increase_provider_uid_maximum_length"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_tenant_and_id_idx",
|
||||
index_details="(tenant_id, id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_tenant_and_id_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_idx",
|
||||
index_details="(tenant_id, scan_id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_id_idx",
|
||||
index_details="(tenant_id, scan_id, id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_id_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_delta_new_idx",
|
||||
index_details="(tenant_id, id) where delta = 'new'",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_delta_new_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,49 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0010_findings_performance_indexes_partitions"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id", "id"], name="find_tenant_scan_id_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="finding",
|
||||
index=models.Index(
|
||||
condition=models.Q(("delta", "new")),
|
||||
fields=["tenant_id", "id"],
|
||||
name="find_delta_new_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="resourcetagmapping",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "resource_id"], name="resource_tag_tenant_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="resource",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "service", "region", "type"],
|
||||
name="resource_tenant_metadata_idx",
|
||||
),
|
||||
),
|
||||
]
|
||||
15
api/src/backend/api/migrations/0012_scan_report_output.py
Normal file
15
api/src/backend/api/migrations/0012_scan_report_output.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0011_findings_performance_indexes_parent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="scan",
|
||||
name="output_location",
|
||||
field=models.CharField(blank=True, max_length=200, null=True),
|
||||
),
|
||||
]
|
||||
35
api/src/backend/api/migrations/0013_integrations_enum.py
Normal file
35
api/src/backend/api/migrations/0013_integrations_enum.py
Normal file
@@ -0,0 +1,35 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
from functools import partial
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
|
||||
from api.models import Integration
|
||||
|
||||
IntegrationTypeEnumMigration = PostgresEnumMigration(
|
||||
enum_name="integration_type",
|
||||
enum_values=tuple(
|
||||
integration_type[0]
|
||||
for integration_type in Integration.IntegrationChoices.choices
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0012_scan_report_output"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
IntegrationTypeEnumMigration.create_enum_type,
|
||||
reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(register_enum, enum_class=IntegrationTypeEnum),
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
131
api/src/backend/api/migrations/0014_integrations.py
Normal file
131
api/src/backend/api/migrations/0014_integrations.py
Normal file
@@ -0,0 +1,131 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-03 15:46
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0013_integrations_enum"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Integration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("enabled", models.BooleanField(default=False)),
|
||||
("connected", models.BooleanField(blank=True, null=True)),
|
||||
(
|
||||
"connection_last_checked_at",
|
||||
models.DateTimeField(blank=True, null=True),
|
||||
),
|
||||
(
|
||||
"integration_type",
|
||||
api.db_utils.IntegrationTypeEnumField(
|
||||
choices=[
|
||||
("amazon_s3", "Amazon S3"),
|
||||
("saml", "SAML"),
|
||||
("aws_security_hub", "AWS Security Hub"),
|
||||
("jira", "JIRA"),
|
||||
("slack", "Slack"),
|
||||
]
|
||||
),
|
||||
),
|
||||
("configuration", models.JSONField(default=dict)),
|
||||
("_credentials", models.BinaryField(db_column="credentials")),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={"db_table": "integrations", "abstract": False},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="integration",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="IntegrationProviderRelationship",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"integration",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="api.integration",
|
||||
),
|
||||
),
|
||||
(
|
||||
"provider",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.provider"
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "integration_provider_mappings",
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("integration_id", "provider_id"),
|
||||
name="unique_integration_provider_rel",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="IntegrationProviderRelationship",
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_integrationproviderrelationship",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="integration",
|
||||
name="providers",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
related_name="integrations",
|
||||
through="api.IntegrationProviderRelationship",
|
||||
to="api.provider",
|
||||
),
|
||||
),
|
||||
]
|
||||
26
api/src/backend/api/migrations/0015_finding_muted.py
Normal file
26
api/src/backend/api/migrations/0015_finding_muted.py
Normal file
@@ -0,0 +1,26 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-25 11:29
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0014_integrations"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="muted",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="finding",
|
||||
name="status",
|
||||
field=api.db_utils.StatusEnumField(
|
||||
choices=[("FAIL", "Fail"), ("PASS", "Pass"), ("MANUAL", "Manual")]
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,32 @@
|
||||
# Generated by Django 5.1.5 on 2025-03-31 10:46
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0015_finding_muted"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="compliance",
|
||||
field=models.JSONField(blank=True, default=dict, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="details",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="metadata",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="resource",
|
||||
name="partition",
|
||||
field=models.TextField(blank=True, null=True),
|
||||
),
|
||||
]
|
||||
32
api/src/backend/api/migrations/0017_m365_provider.py
Normal file
32
api/src/backend/api/migrations/0017_m365_provider.py
Normal file
@@ -0,0 +1,32 @@
|
||||
# Generated by Django 5.1.7 on 2025-04-16 08:47
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0016_finding_compliance_resource_details_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="provider",
|
||||
field=api.db_utils.ProviderEnumField(
|
||||
choices=[
|
||||
("aws", "AWS"),
|
||||
("azure", "Azure"),
|
||||
("gcp", "GCP"),
|
||||
("kubernetes", "Kubernetes"),
|
||||
("m365", "M365"),
|
||||
],
|
||||
default="aws",
|
||||
),
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'm365';",
|
||||
reverse_sql=migrations.RunSQL.noop,
|
||||
),
|
||||
]
|
||||
0
api/src/backend/api/migrations/__init__.py
Normal file
0
api/src/backend/api/migrations/__init__.py
Normal file
1234
api/src/backend/api/models.py
Normal file
1234
api/src/backend/api/models.py
Normal file
File diff suppressed because it is too large
Load Diff
6
api/src/backend/api/pagination.py
Normal file
6
api/src/backend/api/pagination.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from rest_framework_json_api.pagination import JsonApiPageNumberPagination
|
||||
|
||||
|
||||
class ComplianceOverviewPagination(JsonApiPageNumberPagination):
|
||||
page_size = 50
|
||||
max_page_size = 100
|
||||
203
api/src/backend/api/partitions.py
Normal file
203
api/src/backend/api/partitions.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Generator, Optional
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from django.conf import settings
|
||||
from psqlextra.partitioning import (
|
||||
PostgresPartitioningManager,
|
||||
PostgresRangePartition,
|
||||
PostgresRangePartitioningStrategy,
|
||||
PostgresTimePartitionSize,
|
||||
PostgresPartitioningError,
|
||||
)
|
||||
from psqlextra.partitioning.config import PostgresPartitioningConfig
|
||||
from uuid6 import UUID
|
||||
|
||||
from api.models import Finding, ResourceFindingMapping
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
from api.uuid_utils import datetime_to_uuid7
|
||||
|
||||
|
||||
class PostgresUUIDv7RangePartition(PostgresRangePartition):
|
||||
def __init__(
|
||||
self,
|
||||
from_values: UUID,
|
||||
to_values: UUID,
|
||||
size: PostgresTimePartitionSize,
|
||||
name_format: Optional[str] = None,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
self.from_values = from_values
|
||||
self.to_values = to_values
|
||||
self.size = size
|
||||
self.name_format = name_format
|
||||
|
||||
self.rls_statements = None
|
||||
if "rls_statements" in kwargs:
|
||||
self.rls_statements = kwargs["rls_statements"]
|
||||
|
||||
start_timestamp_ms = self.from_values.time
|
||||
|
||||
self.start_datetime = datetime.fromtimestamp(
|
||||
start_timestamp_ms / 1000, timezone.utc
|
||||
)
|
||||
|
||||
def name(self) -> str:
|
||||
if not self.name_format:
|
||||
raise PostgresPartitioningError("Unknown size/unit")
|
||||
|
||||
return self.start_datetime.strftime(self.name_format).lower()
|
||||
|
||||
def deconstruct(self) -> dict:
|
||||
return {
|
||||
**super().deconstruct(),
|
||||
"size_unit": self.size.unit.value,
|
||||
"size_value": self.size.value,
|
||||
}
|
||||
|
||||
def create(
|
||||
self,
|
||||
model,
|
||||
schema_editor,
|
||||
comment,
|
||||
) -> None:
|
||||
super().create(model, schema_editor, comment)
|
||||
|
||||
# if this model has RLS statements, add them to the partition
|
||||
if isinstance(self.rls_statements, list):
|
||||
schema_editor.add_constraint(
|
||||
model,
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name=f"rls_on_{self.name()}",
|
||||
partition_name=self.name(),
|
||||
statements=self.rls_statements,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class PostgresUUIDv7PartitioningStrategy(PostgresRangePartitioningStrategy):
|
||||
def __init__(
|
||||
self,
|
||||
size: PostgresTimePartitionSize,
|
||||
count: int,
|
||||
start_date: datetime = None,
|
||||
max_age: Optional[relativedelta] = None,
|
||||
name_format: Optional[str] = None,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
self.start_date = start_date.replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
self.size = size
|
||||
self.count = count
|
||||
self.max_age = max_age
|
||||
self.name_format = name_format
|
||||
|
||||
self.rls_statements = None
|
||||
if "rls_statements" in kwargs:
|
||||
self.rls_statements = kwargs["rls_statements"]
|
||||
|
||||
def to_create(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
|
||||
current_datetime = (
|
||||
self.start_date if self.start_date else self.get_start_datetime()
|
||||
)
|
||||
|
||||
for _ in range(self.count):
|
||||
end_datetime = (
|
||||
current_datetime + self.size.as_delta() - relativedelta(microseconds=1)
|
||||
)
|
||||
start_uuid7 = datetime_to_uuid7(current_datetime)
|
||||
end_uuid7 = datetime_to_uuid7(end_datetime)
|
||||
|
||||
yield PostgresUUIDv7RangePartition(
|
||||
from_values=start_uuid7,
|
||||
to_values=end_uuid7,
|
||||
size=self.size,
|
||||
name_format=self.name_format,
|
||||
rls_statements=self.rls_statements,
|
||||
)
|
||||
|
||||
current_datetime += self.size.as_delta()
|
||||
|
||||
def to_delete(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
|
||||
if not self.max_age:
|
||||
return
|
||||
|
||||
current_datetime = self.get_start_datetime() - self.max_age
|
||||
|
||||
while True:
|
||||
end_datetime = current_datetime + self.size.as_delta()
|
||||
start_uuid7 = datetime_to_uuid7(current_datetime)
|
||||
end_uuid7 = datetime_to_uuid7(end_datetime)
|
||||
|
||||
# dropping table will delete indexes and policies
|
||||
yield PostgresUUIDv7RangePartition(
|
||||
from_values=start_uuid7,
|
||||
to_values=end_uuid7,
|
||||
size=self.size,
|
||||
name_format=self.name_format,
|
||||
)
|
||||
|
||||
current_datetime -= self.size.as_delta()
|
||||
|
||||
def get_start_datetime(self) -> datetime:
|
||||
"""
|
||||
Gets the start of the current month in UTC timezone.
|
||||
|
||||
This function returns a `datetime` object set to the first day of the current
|
||||
month, at midnight (00:00:00), in UTC.
|
||||
|
||||
Returns:
|
||||
datetime: A `datetime` object representing the start of the current month in UTC.
|
||||
"""
|
||||
return datetime.now(timezone.utc).replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
|
||||
|
||||
def relative_days_or_none(value):
|
||||
if value is None:
|
||||
return None
|
||||
return relativedelta(days=value)
|
||||
|
||||
|
||||
#
|
||||
# To manage the partitions, run `python manage.py pgpartition --using admin`
|
||||
#
|
||||
# For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra
|
||||
manager = PostgresPartitioningManager(
|
||||
[
|
||||
PostgresPartitioningConfig(
|
||||
model=Finding,
|
||||
strategy=PostgresUUIDv7PartitioningStrategy(
|
||||
start_date=datetime.now(timezone.utc),
|
||||
size=PostgresTimePartitionSize(
|
||||
months=settings.FINDINGS_TABLE_PARTITION_MONTHS
|
||||
),
|
||||
count=settings.FINDINGS_TABLE_PARTITION_COUNT,
|
||||
max_age=relative_days_or_none(
|
||||
settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
|
||||
),
|
||||
name_format="%Y_%b",
|
||||
rls_statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
# ResourceFindingMapping should always follow the Finding partitioning
|
||||
PostgresPartitioningConfig(
|
||||
model=ResourceFindingMapping,
|
||||
strategy=PostgresUUIDv7PartitioningStrategy(
|
||||
start_date=datetime.now(timezone.utc),
|
||||
size=PostgresTimePartitionSize(
|
||||
months=settings.FINDINGS_TABLE_PARTITION_MONTHS
|
||||
),
|
||||
count=settings.FINDINGS_TABLE_PARTITION_COUNT,
|
||||
max_age=relative_days_or_none(
|
||||
settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
|
||||
),
|
||||
name_format="%Y_%b",
|
||||
rls_statements=["SELECT"],
|
||||
),
|
||||
),
|
||||
]
|
||||
)
|
||||
75
api/src/backend/api/rbac/permissions.py
Normal file
75
api/src/backend/api/rbac/permissions.py
Normal file
@@ -0,0 +1,75 @@
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from django.db.models import QuerySet
|
||||
from rest_framework.permissions import BasePermission
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.models import Provider, Role, User
|
||||
|
||||
|
||||
class Permissions(Enum):
|
||||
MANAGE_USERS = "manage_users"
|
||||
MANAGE_ACCOUNT = "manage_account"
|
||||
MANAGE_BILLING = "manage_billing"
|
||||
MANAGE_PROVIDERS = "manage_providers"
|
||||
MANAGE_INTEGRATIONS = "manage_integrations"
|
||||
MANAGE_SCANS = "manage_scans"
|
||||
UNLIMITED_VISIBILITY = "unlimited_visibility"
|
||||
|
||||
|
||||
class HasPermissions(BasePermission):
|
||||
"""
|
||||
Custom permission to check if the user's role has the required permissions.
|
||||
The required permissions should be specified in the view as a list in `required_permissions`.
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view):
|
||||
required_permissions = getattr(view, "required_permissions", [])
|
||||
if not required_permissions:
|
||||
return True
|
||||
|
||||
user_roles = (
|
||||
User.objects.using(MainRouter.admin_db).get(id=request.user.id).roles.all()
|
||||
)
|
||||
if not user_roles:
|
||||
return False
|
||||
|
||||
for perm in required_permissions:
|
||||
if not getattr(user_roles[0], perm.value, False):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_role(user: User) -> Optional[Role]:
|
||||
"""
|
||||
Retrieve the first role assigned to the given user.
|
||||
|
||||
Returns:
|
||||
The user's first Role instance if the user has any roles, otherwise None.
|
||||
"""
|
||||
return user.roles.first()
|
||||
|
||||
|
||||
def get_providers(role: Role) -> QuerySet[Provider]:
|
||||
"""
|
||||
Return a distinct queryset of Providers accessible by the given role.
|
||||
|
||||
If the role has no associated provider groups, an empty queryset is returned.
|
||||
|
||||
Args:
|
||||
role: A Role instance.
|
||||
|
||||
Returns:
|
||||
A QuerySet of Provider objects filtered by the role's provider groups.
|
||||
If the role has no provider groups, returns an empty queryset.
|
||||
"""
|
||||
tenant = role.tenant
|
||||
provider_groups = role.provider_groups.all()
|
||||
if not provider_groups.exists():
|
||||
return Provider.objects.none()
|
||||
|
||||
return Provider.objects.filter(
|
||||
tenant=tenant, provider_groups__in=provider_groups
|
||||
).distinct()
|
||||
23
api/src/backend/api/renderers.py
Normal file
23
api/src/backend/api/renderers.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from contextlib import nullcontext
|
||||
|
||||
from rest_framework_json_api.renderers import JSONRenderer
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
|
||||
|
||||
class APIJSONRenderer(JSONRenderer):
|
||||
"""JSONRenderer override to apply tenant RLS when there are included resources in the request."""
|
||||
|
||||
def render(self, data, accepted_media_type=None, renderer_context=None):
|
||||
request = renderer_context.get("request")
|
||||
tenant_id = getattr(request, "tenant_id", None) if request else None
|
||||
include_param_present = "include" in request.query_params if request else False
|
||||
|
||||
# Use rls_transaction if needed for included resources, otherwise do nothing
|
||||
context_manager = (
|
||||
rls_transaction(tenant_id)
|
||||
if tenant_id and include_param_present
|
||||
else nullcontext()
|
||||
)
|
||||
with context_manager:
|
||||
return super().render(data, accepted_media_type, renderer_context)
|
||||
189
api/src/backend/api/rls.py
Normal file
189
api/src/backend/api/rls.py
Normal file
@@ -0,0 +1,189 @@
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import DEFAULT_DB_ALIAS, models
|
||||
from django.db.backends.ddl_references import Statement, Table
|
||||
|
||||
from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
|
||||
|
||||
|
||||
class Tenant(models.Model):
|
||||
"""
|
||||
The Tenant is the basic grouping in the system. It is used to separate data between customers.
|
||||
"""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
name = models.CharField(max_length=100)
|
||||
|
||||
class Meta:
|
||||
db_table = "tenants"
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "tenants"
|
||||
|
||||
|
||||
class RowLevelSecurityConstraint(models.BaseConstraint):
|
||||
"""
|
||||
Model constraint to enforce row-level security on a tenant based model, in addition to the least privileges.
|
||||
|
||||
The constraint can be applied to a partitioned table by specifying the `partition_name` keyword argument.
|
||||
"""
|
||||
|
||||
rls_sql_query = """
|
||||
ALTER TABLE %(table_name)s ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE %(table_name)s FORCE ROW LEVEL SECURITY;
|
||||
"""
|
||||
|
||||
policy_sql_query = """
|
||||
CREATE POLICY %(db_user)s_%(table_name)s_{statement}
|
||||
ON %(table_name)s
|
||||
FOR {statement}
|
||||
TO %(db_user)s
|
||||
{clause} (
|
||||
CASE
|
||||
WHEN current_setting('%(tenant_setting)s', True) IS NULL THEN FALSE
|
||||
ELSE %(field_column)s = current_setting('%(tenant_setting)s')::uuid
|
||||
END
|
||||
);
|
||||
"""
|
||||
|
||||
grant_sql_query = """
|
||||
GRANT {statement} ON %(table_name)s TO %(db_user)s;
|
||||
"""
|
||||
|
||||
drop_sql_query = """
|
||||
ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
|
||||
ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
|
||||
REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
|
||||
"""
|
||||
|
||||
drop_policy_sql_query = """
|
||||
DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, field: str, name: str, statements: list | None = None, **kwargs
|
||||
) -> None:
|
||||
super().__init__(name=name)
|
||||
self.target_field: str = field
|
||||
self.statements = statements or ["SELECT"]
|
||||
self.partition_name = None
|
||||
if "partition_name" in kwargs:
|
||||
self.partition_name = kwargs["partition_name"]
|
||||
|
||||
def create_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
field_column = schema_editor.quote_name(self.target_field)
|
||||
|
||||
policy_queries = ""
|
||||
grant_queries = ""
|
||||
for statement in self.statements:
|
||||
clause = f"{'WITH CHECK' if statement == 'INSERT' else 'USING'}"
|
||||
policy_queries = f"{policy_queries}{self.policy_sql_query.format(statement=statement, clause=clause)}"
|
||||
grant_queries = (
|
||||
f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
|
||||
)
|
||||
|
||||
full_create_sql_query = f"{self.rls_sql_query}{policy_queries}{grant_queries}"
|
||||
|
||||
table_name = model._meta.db_table
|
||||
if self.partition_name:
|
||||
table_name = f"{table_name}_{self.partition_name}"
|
||||
|
||||
return Statement(
|
||||
full_create_sql_query,
|
||||
table_name=table_name,
|
||||
field_column=field_column,
|
||||
db_user=DB_USER,
|
||||
tenant_setting=POSTGRES_TENANT_VAR,
|
||||
partition_name=self.partition_name,
|
||||
)
|
||||
|
||||
def remove_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
field_column = schema_editor.quote_name(self.target_field)
|
||||
raw_table_name = model._meta.db_table
|
||||
table_name = raw_table_name
|
||||
if self.partition_name:
|
||||
raw_table_name = f"{raw_table_name}_{self.partition_name}"
|
||||
table_name = raw_table_name
|
||||
|
||||
full_drop_sql_query = (
|
||||
f"{self.drop_sql_query}"
|
||||
f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
|
||||
)
|
||||
return Statement(
|
||||
full_drop_sql_query,
|
||||
table_name=Table(table_name, schema_editor.quote_name),
|
||||
raw_table_name=raw_table_name,
|
||||
field_column=field_column,
|
||||
db_user=DB_USER,
|
||||
partition_name=self.partition_name,
|
||||
)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if isinstance(other, RowLevelSecurityConstraint):
|
||||
return self.name == other.name and self.target_field == other.target_field
|
||||
return super().__eq__(other)
|
||||
|
||||
def deconstruct(self) -> tuple[str, tuple, dict]:
|
||||
path, _, kwargs = super().deconstruct()
|
||||
return (path, (self.target_field,), kwargs)
|
||||
|
||||
def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS): # noqa: F841
|
||||
if not hasattr(instance, "tenant_id"):
|
||||
raise ValidationError(f"{model.__name__} does not have a tenant_id field.")
|
||||
|
||||
|
||||
class BaseSecurityConstraint(models.BaseConstraint):
|
||||
"""Model constraint to grant the least privileges to the API database user."""
|
||||
|
||||
grant_sql_query = """
|
||||
GRANT {statement} ON %(table_name)s TO %(db_user)s;
|
||||
"""
|
||||
|
||||
drop_sql_query = """
|
||||
REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
|
||||
"""
|
||||
|
||||
def __init__(self, name: str, statements: list | None = None) -> None:
|
||||
super().__init__(name=name)
|
||||
self.statements = statements or ["SELECT"]
|
||||
|
||||
def create_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
grant_queries = ""
|
||||
for statement in self.statements:
|
||||
grant_queries = (
|
||||
f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
|
||||
)
|
||||
|
||||
return Statement(
|
||||
grant_queries,
|
||||
table_name=model._meta.db_table,
|
||||
db_user=DB_USER,
|
||||
)
|
||||
|
||||
def remove_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
return Statement(
|
||||
self.drop_sql_query,
|
||||
table_name=Table(model._meta.db_table, schema_editor.quote_name),
|
||||
db_user=DB_USER,
|
||||
)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if isinstance(other, BaseSecurityConstraint):
|
||||
return self.name == other.name
|
||||
return super().__eq__(other)
|
||||
|
||||
def deconstruct(self) -> tuple[str, tuple, dict]:
|
||||
path, args, kwargs = super().deconstruct()
|
||||
return path, args, kwargs
|
||||
|
||||
|
||||
class RowLevelSecurityProtectedModel(models.Model):
|
||||
tenant = models.ForeignKey("Tenant", on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
34
api/src/backend/api/signals.py
Normal file
34
api/src/backend/api/signals.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from celery import states
|
||||
from celery.signals import before_task_publish
|
||||
from config.celery import celery_app
|
||||
from django.db.models.signals import post_delete
|
||||
from django.dispatch import receiver
|
||||
from django_celery_results.backends.database import DatabaseBackend
|
||||
|
||||
from api.db_utils import delete_related_daily_task
|
||||
from api.models import Provider
|
||||
|
||||
|
||||
def create_task_result_on_publish(sender=None, headers=None, **kwargs): # noqa: F841
|
||||
"""Celery signal to store TaskResult entries when tasks reach the broker."""
|
||||
db_result_backend = DatabaseBackend(celery_app)
|
||||
request = type("request", (object,), headers)
|
||||
|
||||
db_result_backend.store_result(
|
||||
headers["id"],
|
||||
None,
|
||||
states.PENDING,
|
||||
traceback=None,
|
||||
request=request,
|
||||
)
|
||||
|
||||
|
||||
before_task_publish.connect(
|
||||
create_task_result_on_publish, dispatch_uid="create_task_result_on_publish"
|
||||
)
|
||||
|
||||
|
||||
@receiver(post_delete, sender=Provider)
|
||||
def delete_provider_scan_task(sender, instance, **kwargs): # noqa: F841
|
||||
# Delete the associated periodic task when the provider is deleted
|
||||
delete_related_daily_task(instance.id)
|
||||
11810
api/src/backend/api/specs/v1.yaml
Normal file
11810
api/src/backend/api/specs/v1.yaml
Normal file
File diff suppressed because it is too large
Load Diff
0
api/src/backend/api/tests/__init__.py
Normal file
0
api/src/backend/api/tests/__init__.py
Normal file
300
api/src/backend/api/tests/integration/test_authentication.py
Normal file
300
api/src/backend/api/tests/integration/test_authentication.py
Normal file
@@ -0,0 +1,300 @@
|
||||
import pytest
|
||||
from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from api.models import Membership, User
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_basic_authentication():
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
|
||||
# Check that a 401 is returned when no basic authentication is provided
|
||||
no_auth_response = client.get(reverse("provider-list"))
|
||||
assert no_auth_response.status_code == 401
|
||||
|
||||
# Check that we can create a new user without any kind of authentication
|
||||
user_creation_response = client.post(
|
||||
reverse("user-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "users",
|
||||
"attributes": {
|
||||
"name": "test",
|
||||
"email": test_user,
|
||||
"password": test_password,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert user_creation_response.status_code == 201
|
||||
|
||||
# Check that using our new user's credentials we can authenticate and get the providers
|
||||
access_token, _ = get_api_tokens(client, test_user, test_password)
|
||||
auth_headers = get_authorization_header(access_token)
|
||||
|
||||
auth_response = client.get(
|
||||
reverse("provider-list"),
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert auth_response.status_code == 200
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_refresh_token(create_test_user, tenants_fixture):
|
||||
client = APIClient()
|
||||
|
||||
# Assert that we can obtain a new access token using the refresh one
|
||||
access_token, refresh_token = get_api_tokens(
|
||||
client, create_test_user.email, TEST_PASSWORD
|
||||
)
|
||||
valid_refresh_response = client.post(
|
||||
reverse("token-refresh"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-refresh",
|
||||
"attributes": {"refresh": refresh_token},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert valid_refresh_response.status_code == 200
|
||||
assert (
|
||||
valid_refresh_response.json()["data"]["attributes"]["refresh"] != refresh_token
|
||||
)
|
||||
|
||||
# Assert the former refresh token gets invalidated
|
||||
invalid_refresh_response = client.post(
|
||||
reverse("token-refresh"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-refresh",
|
||||
"attributes": {"refresh": refresh_token},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert invalid_refresh_response.status_code == 400
|
||||
|
||||
# Assert that the new refresh token could be used
|
||||
new_refresh_response = client.post(
|
||||
reverse("token-refresh"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-refresh",
|
||||
"attributes": {
|
||||
"refresh": valid_refresh_response.json()["data"]["attributes"][
|
||||
"refresh"
|
||||
]
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert new_refresh_response.status_code == 200
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fixture):
|
||||
client = APIClient()
|
||||
|
||||
role = roles_fixture[0]
|
||||
|
||||
user1_email = "user1@testing.com"
|
||||
user2_email = "user2@testing.com"
|
||||
|
||||
password = "thisisapassword123"
|
||||
|
||||
user1_response = client.post(
|
||||
reverse("user-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "users",
|
||||
"attributes": {
|
||||
"name": "user1",
|
||||
"email": user1_email,
|
||||
"password": password,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert user1_response.status_code == 201
|
||||
|
||||
user1_access_token, _ = get_api_tokens(client, user1_email, password)
|
||||
user1_headers = get_authorization_header(user1_access_token)
|
||||
|
||||
user2_invitation = client.post(
|
||||
reverse("invitation-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "invitations",
|
||||
"attributes": {"email": user2_email},
|
||||
"relationships": {
|
||||
"roles": {
|
||||
"data": [
|
||||
{
|
||||
"type": "roles",
|
||||
"id": str(role.id),
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=user1_headers,
|
||||
)
|
||||
assert user2_invitation.status_code == 201
|
||||
invitation_token = user2_invitation.json()["data"]["attributes"]["token"]
|
||||
|
||||
user2_response = client.post(
|
||||
reverse("user-list") + f"?invitation_token={invitation_token}",
|
||||
data={
|
||||
"data": {
|
||||
"type": "users",
|
||||
"attributes": {
|
||||
"name": "user2",
|
||||
"email": user2_email,
|
||||
"password": password,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert user2_response.status_code == 201
|
||||
|
||||
user2_access_token, _ = get_api_tokens(client, user2_email, password)
|
||||
user2_headers = get_authorization_header(user2_access_token)
|
||||
|
||||
user1_me = client.get(reverse("user-me"), headers=user1_headers)
|
||||
assert user1_me.status_code == 200
|
||||
assert user1_me.json()["data"]["attributes"]["email"] == user1_email
|
||||
|
||||
user2_me = client.get(reverse("user-me"), headers=user2_headers)
|
||||
assert user2_me.status_code == 200
|
||||
assert user2_me.json()["data"]["attributes"]["email"] == user2_email
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestTokenSwitchTenant:
|
||||
def test_switch_tenant_with_valid_token(self, tenants_fixture, providers_fixture):
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
|
||||
# Check that we can create a new user without any kind of authentication
|
||||
user_creation_response = client.post(
|
||||
reverse("user-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "users",
|
||||
"attributes": {
|
||||
"name": "test",
|
||||
"email": test_user,
|
||||
"password": test_password,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert user_creation_response.status_code == 201
|
||||
|
||||
# Create a new relationship between this user and another tenant
|
||||
tenant_id = tenants_fixture[0].id
|
||||
user_instance = User.objects.get(email=test_user)
|
||||
Membership.objects.create(user=user_instance, tenant_id=tenant_id)
|
||||
|
||||
# Check that using our new user's credentials we can authenticate and get the providers
|
||||
access_token, _ = get_api_tokens(client, test_user, test_password)
|
||||
auth_headers = get_authorization_header(access_token)
|
||||
|
||||
user_me_response = client.get(
|
||||
reverse("user-me"),
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert user_me_response.status_code == 200
|
||||
# Assert this user belongs to two tenants
|
||||
assert (
|
||||
user_me_response.json()["data"]["relationships"]["memberships"]["meta"][
|
||||
"count"
|
||||
]
|
||||
== 2
|
||||
)
|
||||
|
||||
provider_response = client.get(
|
||||
reverse("provider-list"),
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert provider_response.status_code == 200
|
||||
# Empty response since there are no providers in this tenant
|
||||
assert not provider_response.json()["data"]
|
||||
|
||||
switch_tenant_response = client.post(
|
||||
reverse("token-switch"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-switch-tenant",
|
||||
"attributes": {"tenant_id": tenant_id},
|
||||
}
|
||||
},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert switch_tenant_response.status_code == 200
|
||||
new_access_token = switch_tenant_response.json()["data"]["attributes"]["access"]
|
||||
new_auth_headers = get_authorization_header(new_access_token)
|
||||
|
||||
provider_response = client.get(
|
||||
reverse("provider-list"),
|
||||
headers=new_auth_headers,
|
||||
)
|
||||
assert provider_response.status_code == 200
|
||||
# Now it must be data because we switched to another tenant with providers
|
||||
assert provider_response.json()["data"]
|
||||
|
||||
def test_switch_tenant_with_invalid_token(self, create_test_user, tenants_fixture):
|
||||
client = APIClient()
|
||||
|
||||
access_token, refresh_token = get_api_tokens(
|
||||
client, create_test_user.email, TEST_PASSWORD
|
||||
)
|
||||
auth_headers = get_authorization_header(access_token)
|
||||
|
||||
invalid_token_response = client.post(
|
||||
reverse("token-switch"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-switch-tenant",
|
||||
"attributes": {"tenant_id": "invalid_tenant_id"},
|
||||
}
|
||||
},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert invalid_token_response.status_code == 400
|
||||
assert invalid_token_response.json()["errors"][0]["code"] == "invalid"
|
||||
assert (
|
||||
invalid_token_response.json()["errors"][0]["detail"]
|
||||
== "Must be a valid UUID."
|
||||
)
|
||||
|
||||
invalid_tenant_response = client.post(
|
||||
reverse("token-switch"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "tokens-switch-tenant",
|
||||
"attributes": {"tenant_id": tenants_fixture[-1].id},
|
||||
}
|
||||
},
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert invalid_tenant_response.status_code == 400
|
||||
assert invalid_tenant_response.json()["errors"][0]["code"] == "invalid"
|
||||
assert invalid_tenant_response.json()["errors"][0]["detail"] == (
|
||||
"Tenant does not exist or user is not a " "member."
|
||||
)
|
||||
85
api/src/backend/api/tests/integration/test_providers.py
Normal file
85
api/src/backend/api/tests/integration/test_providers.py
Normal file
@@ -0,0 +1,85 @@
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from conftest import get_api_tokens, get_authorization_header
|
||||
from django.urls import reverse
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from api.models import Provider
|
||||
|
||||
|
||||
@patch("api.v1.views.Task.objects.get")
|
||||
@patch("api.v1.views.delete_provider_task.delay")
|
||||
@pytest.mark.django_db
|
||||
def test_delete_provider_without_executing_task(
|
||||
mock_delete_task, mock_task_get, create_test_user, tenants_fixture, tasks_fixture
|
||||
):
|
||||
client = APIClient()
|
||||
|
||||
test_user = "test_email@prowler.com"
|
||||
test_password = "test_password"
|
||||
|
||||
prowler_task = tasks_fixture[0]
|
||||
task_mock = Mock()
|
||||
task_mock.id = prowler_task.id
|
||||
mock_delete_task.return_value = task_mock
|
||||
mock_task_get.return_value = prowler_task
|
||||
|
||||
user_creation_response = client.post(
|
||||
reverse("user-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "users",
|
||||
"attributes": {
|
||||
"name": "test",
|
||||
"email": test_user,
|
||||
"password": test_password,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
)
|
||||
assert user_creation_response.status_code == 201
|
||||
|
||||
access_token, _ = get_api_tokens(client, test_user, test_password)
|
||||
auth_headers = get_authorization_header(access_token)
|
||||
|
||||
create_provider_response = client.post(
|
||||
reverse("provider-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "providers",
|
||||
"attributes": {
|
||||
"provider": Provider.ProviderChoices.AWS,
|
||||
"uid": "123456789012",
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert create_provider_response.status_code == 201
|
||||
provider_id = create_provider_response.json()["data"]["id"]
|
||||
provider_uid = create_provider_response.json()["data"]["attributes"]["uid"]
|
||||
|
||||
remove_provider = client.delete(
|
||||
reverse("provider-detail", kwargs={"pk": provider_id}),
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert remove_provider.status_code == 202
|
||||
|
||||
recreate_provider_response = client.post(
|
||||
reverse("provider-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "providers",
|
||||
"attributes": {
|
||||
"provider": Provider.ProviderChoices.AWS,
|
||||
"uid": provider_uid,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=auth_headers,
|
||||
)
|
||||
assert recreate_provider_response.status_code == 201
|
||||
98
api/src/backend/api/tests/integration/test_tenants.py
Normal file
98
api/src/backend/api/tests/integration/test_tenants.py
Normal file
@@ -0,0 +1,98 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django.urls import reverse
|
||||
|
||||
from conftest import TEST_USER, TEST_PASSWORD, get_api_tokens, get_authorization_header
|
||||
|
||||
|
||||
@patch("api.v1.views.schedule_provider_scan")
|
||||
@pytest.mark.django_db
|
||||
def test_check_resources_between_different_tenants(
|
||||
schedule_mock,
|
||||
enforce_test_user_db_connection,
|
||||
authenticated_api_client,
|
||||
tenants_fixture,
|
||||
set_user_admin_roles_fixture,
|
||||
):
|
||||
client = authenticated_api_client
|
||||
|
||||
tenant1 = str(tenants_fixture[0].id)
|
||||
tenant2 = str(tenants_fixture[1].id)
|
||||
|
||||
tenant1_token, _ = get_api_tokens(
|
||||
client, TEST_USER, TEST_PASSWORD, tenant_id=tenant1
|
||||
)
|
||||
tenant2_token, _ = get_api_tokens(
|
||||
client, TEST_USER, TEST_PASSWORD, tenant_id=tenant2
|
||||
)
|
||||
|
||||
tenant1_headers = get_authorization_header(tenant1_token)
|
||||
tenant2_headers = get_authorization_header(tenant2_token)
|
||||
|
||||
# Create a provider on tenant 1
|
||||
provider_data = {
|
||||
"data": {
|
||||
"type": "providers",
|
||||
"attributes": {
|
||||
"alias": "test_provider_tenant_1",
|
||||
"provider": "aws",
|
||||
"uid": "123456789012",
|
||||
},
|
||||
}
|
||||
}
|
||||
provider1_response = client.post(
|
||||
reverse("provider-list"),
|
||||
data=provider_data,
|
||||
format="vnd.api+json",
|
||||
headers=tenant1_headers,
|
||||
)
|
||||
assert provider1_response.status_code == 201
|
||||
provider1_id = provider1_response.json()["data"]["id"]
|
||||
|
||||
# Create a provider on tenant 2
|
||||
provider_data = {
|
||||
"data": {
|
||||
"type": "providers",
|
||||
"attributes": {
|
||||
"alias": "test_provider_tenant_2",
|
||||
"provider": "aws",
|
||||
"uid": "123456789013",
|
||||
},
|
||||
}
|
||||
}
|
||||
provider2_response = client.post(
|
||||
reverse("provider-list"),
|
||||
data=provider_data,
|
||||
format="vnd.api+json",
|
||||
headers=tenant2_headers,
|
||||
)
|
||||
assert provider2_response.status_code == 201
|
||||
provider2_id = provider2_response.json()["data"]["id"]
|
||||
|
||||
# Try to get the provider from tenant 1 on tenant 2 and vice versa
|
||||
tenant1_response = client.get(
|
||||
reverse("provider-detail", kwargs={"pk": provider1_id}),
|
||||
headers=tenant2_headers,
|
||||
)
|
||||
assert tenant1_response.status_code == 404
|
||||
tenant2_response = client.get(
|
||||
reverse("provider-detail", kwargs={"pk": provider1_id}),
|
||||
headers=tenant1_headers,
|
||||
)
|
||||
assert tenant2_response.status_code == 200
|
||||
assert tenant2_response.json()["data"]["id"] == provider1_id
|
||||
|
||||
# Vice versa
|
||||
|
||||
tenant2_response = client.get(
|
||||
reverse("provider-detail", kwargs={"pk": provider2_id}),
|
||||
headers=tenant1_headers,
|
||||
)
|
||||
assert tenant2_response.status_code == 404
|
||||
tenant1_response = client.get(
|
||||
reverse("provider-detail", kwargs={"pk": provider2_id}),
|
||||
headers=tenant2_headers,
|
||||
)
|
||||
assert tenant1_response.status_code == 200
|
||||
assert tenant1_response.json()["data"]["id"] == provider2_id
|
||||
284
api/src/backend/api/tests/test_compliance.py
Normal file
284
api/src/backend/api/tests/test_compliance.py
Normal file
@@ -0,0 +1,284 @@
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from api.compliance import (
|
||||
get_prowler_provider_checks,
|
||||
get_prowler_provider_compliance,
|
||||
load_prowler_compliance,
|
||||
load_prowler_checks,
|
||||
generate_scan_compliance,
|
||||
generate_compliance_overview_template,
|
||||
)
|
||||
from api.models import Provider
|
||||
|
||||
|
||||
class TestCompliance:
|
||||
@patch("api.compliance.CheckMetadata")
|
||||
def test_get_prowler_provider_checks(self, mock_check_metadata):
|
||||
provider_type = Provider.ProviderChoices.AWS
|
||||
mock_check_metadata.get_bulk.return_value = {
|
||||
"check1": MagicMock(),
|
||||
"check2": MagicMock(),
|
||||
"check3": MagicMock(),
|
||||
}
|
||||
checks = get_prowler_provider_checks(provider_type)
|
||||
assert set(checks) == {"check1", "check2", "check3"}
|
||||
mock_check_metadata.get_bulk.assert_called_once_with(provider_type)
|
||||
|
||||
@patch("api.compliance.Compliance")
|
||||
def test_get_prowler_provider_compliance(self, mock_compliance):
|
||||
provider_type = Provider.ProviderChoices.AWS
|
||||
mock_compliance.get_bulk.return_value = {
|
||||
"compliance1": MagicMock(),
|
||||
"compliance2": MagicMock(),
|
||||
}
|
||||
compliance_data = get_prowler_provider_compliance(provider_type)
|
||||
assert compliance_data == mock_compliance.get_bulk.return_value
|
||||
mock_compliance.get_bulk.assert_called_once_with(provider_type)
|
||||
|
||||
@patch("api.models.Provider.ProviderChoices")
|
||||
@patch("api.compliance.get_prowler_provider_compliance")
|
||||
@patch("api.compliance.generate_compliance_overview_template")
|
||||
@patch("api.compliance.load_prowler_checks")
|
||||
def test_load_prowler_compliance(
|
||||
self,
|
||||
mock_load_prowler_checks,
|
||||
mock_generate_compliance_overview_template,
|
||||
mock_get_prowler_provider_compliance,
|
||||
mock_provider_choices,
|
||||
):
|
||||
mock_provider_choices.values = ["aws", "azure"]
|
||||
|
||||
compliance_data_aws = {"compliance_aws": MagicMock()}
|
||||
compliance_data_azure = {"compliance_azure": MagicMock()}
|
||||
|
||||
compliance_data_dict = {
|
||||
"aws": compliance_data_aws,
|
||||
"azure": compliance_data_azure,
|
||||
}
|
||||
|
||||
def mock_get_compliance(provider_type):
|
||||
return compliance_data_dict[provider_type]
|
||||
|
||||
mock_get_prowler_provider_compliance.side_effect = mock_get_compliance
|
||||
|
||||
mock_generate_compliance_overview_template.return_value = {
|
||||
"template_key": "template_value"
|
||||
}
|
||||
|
||||
mock_load_prowler_checks.return_value = {"checks_key": "checks_value"}
|
||||
|
||||
load_prowler_compliance()
|
||||
|
||||
from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS
|
||||
|
||||
assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
|
||||
"template_key": "template_value"
|
||||
}
|
||||
assert PROWLER_CHECKS == {"checks_key": "checks_value"}
|
||||
|
||||
expected_prowler_compliance = compliance_data_dict
|
||||
mock_get_prowler_provider_compliance.assert_any_call("aws")
|
||||
        mock_get_prowler_provider_compliance.assert_any_call("azure")
        mock_generate_compliance_overview_template.assert_called_once_with(
            expected_prowler_compliance
        )
        mock_load_prowler_checks.assert_called_once_with(expected_prowler_compliance)

    @patch("api.compliance.get_prowler_provider_checks")
    @patch("api.models.Provider.ProviderChoices")
    def test_load_prowler_checks(
        self, mock_provider_choices, mock_get_prowler_provider_checks
    ):
        mock_provider_choices.values = ["aws"]

        mock_get_prowler_provider_checks.return_value = ["check1", "check2", "check3"]

        prowler_compliance = {
            "aws": {
                "compliance1": MagicMock(
                    Requirements=[
                        MagicMock(
                            Checks=["check1", "check2"],
                        ),
                    ],
                ),
            },
        }

        expected_checks = {
            "aws": {
                "check1": {"compliance1"},
                "check2": {"compliance1"},
                "check3": set(),
            }
        }

        checks = load_prowler_checks(prowler_compliance)
        assert checks == expected_checks
        mock_get_prowler_provider_checks.assert_called_once_with("aws")

    @patch("api.compliance.PROWLER_CHECKS", new_callable=dict)
    def test_generate_scan_compliance(self, mock_prowler_checks):
        mock_prowler_checks["aws"] = {
            "check1": {"compliance1"},
            "check2": {"compliance1", "compliance2"},
        }

        compliance_overview = {
            "compliance1": {
                "requirements": {
                    "requirement1": {
                        "checks": {"check1": None, "check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 2,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
            "compliance2": {
                "requirements": {
                    "requirement2": {
                        "checks": {"check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 1,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
        }

        provider_type = "aws"
        check_id = "check2"
        status = "FAIL"

        generate_scan_compliance(compliance_overview, provider_type, check_id, status)

        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance1"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance1"]["requirements_status"]["failed"] == 1

        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance2"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance2"]["requirements_status"]["failed"] == 1

        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check1"]
            is None
        )

    @patch("api.models.Provider.ProviderChoices")
    def test_generate_compliance_overview_template(self, mock_provider_choices):
        mock_provider_choices.values = ["aws"]

        requirement1 = MagicMock(
            Id="requirement1",
            Name="Requirement 1",
            Description="Description of requirement 1",
            Attributes=[],
            Checks=["check1", "check2"],
        )
        requirement2 = MagicMock(
            Id="requirement2",
            Name="Requirement 2",
            Description="Description of requirement 2",
            Attributes=[],
            Checks=[],
        )
        compliance1 = MagicMock(
            Requirements=[requirement1, requirement2],
            Framework="Framework 1",
            Version="1.0",
            Description="Description of compliance1",
        )
        prowler_compliance = {"aws": {"compliance1": compliance1}}

        template = generate_compliance_overview_template(prowler_compliance)

        expected_template = {
            "aws": {
                "compliance1": {
                    "framework": "Framework 1",
                    "version": "1.0",
                    "provider": "aws",
                    "description": "Description of compliance1",
                    "requirements": {
                        "requirement1": {
                            "name": "Requirement 1",
                            "description": "Description of requirement 1",
                            "attributes": [],
                            "checks": {"check1": None, "check2": None},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 2,
                            },
                            "status": "PASS",
                        },
                        "requirement2": {
                            "name": "Requirement 2",
                            "description": "Description of requirement 2",
                            "attributes": [],
                            "checks": {},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 0,
                            },
                            "status": "PASS",
                        },
                    },
                    "requirements_status": {
                        "passed": 1,  # total_requirements - manual
                        "failed": 0,
                        "manual": 1,  # requirement2 has 0 checks
                    },
                    "total_requirements": 2,
                }
            }
        }

        assert template == expected_template
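The assertions above pin down generate_scan_compliance tightly enough to sketch it. The following is a minimal illustration that would satisfy this test, not the shipped api/compliance.py code; it assumes PROWLER_CHECKS maps provider type to check id to the set of compliance framework ids, which is exactly how the test populates the patched dict.

# Minimal sketch consistent with the test above; the real implementation
# in api/compliance.py may differ in details.
PROWLER_CHECKS: dict = {}  # provider -> check id -> set of compliance ids (assumed shape)


def generate_scan_compliance(compliance_overview, provider_type, check_id, status):
    for compliance_id in PROWLER_CHECKS.get(provider_type, {}).get(check_id, set()):
        compliance = compliance_overview[compliance_id]
        for requirement in compliance["requirements"].values():
            if check_id not in requirement["checks"]:
                continue
            # Record the finding status on the check and bump the counters.
            requirement["checks"][check_id] = status
            requirement["checks_status"][status.lower()] += 1
            # A single failing check flips the requirement, and the flip is
            # mirrored in the framework-level requirements_status totals.
            if status == "FAIL" and requirement["status"] != "FAIL":
                requirement["status"] = "FAIL"
                compliance["requirements_status"]["passed"] -= 1
                compliance["requirements_status"]["failed"] += 1

Note how this reproduces the test's outcome: failing "check2" touches both compliance1 and compliance2, while "check1" is left at None.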
31 api/src/backend/api/tests/test_database.py Normal file
@@ -0,0 +1,31 @@
import pytest
from django.conf import settings
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import ConnectionRouter

from api.db_router import MainRouter
from api.rls import Tenant
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from unittest.mock import patch


@patch("api.db_router.MainRouter.admin_db", new="admin")
class TestMainDatabaseRouter:
    @pytest.fixture(scope="module")
    def router(self):
        testing_routers = settings.DATABASE_ROUTERS.copy()
        settings.DATABASE_ROUTERS = PROD_DATABASE_ROUTERS
        yield ConnectionRouter()
        settings.DATABASE_ROUTERS = testing_routers

    @pytest.mark.parametrize("api_model", [Tenant])
    def test_router_api_models(self, api_model, router):
        assert router.db_for_read(api_model) == "default"
        assert router.db_for_write(api_model) == "default"

        assert router.allow_migrate_model(MainRouter.admin_db, api_model)
        assert not router.allow_migrate_model("default", api_model)

    def test_router_django_models(self, router):
        assert router.db_for_read(MigrationRecorder.Migration) == MainRouter.admin_db
        assert router.db_for_read(MigrationRecorder.Migration) != "default"
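These three tests describe the routing contract rather than the router itself. A hypothetical router that passes them (the body of api.db_router.MainRouter is not part of this diff) could look like this: API models read and write on "default", Django's bookkeeping models such as the migration recorder resolve to the admin alias, and migrations are only ever applied through the admin connection.

# Hypothetical router satisfying the assertions above; not the actual
# api.db_router.MainRouter.
class SketchMainRouter:
    admin_db = "admin"

    def db_for_read(self, model, **hints):
        # Tenant-scoped API models live in the RLS-protected default database.
        if model._meta.app_label == "api":
            return "default"
        # Everything else (django_migrations, auth, ...) uses the admin alias.
        return self.admin_db

    def db_for_write(self, model, **hints):
        return self.db_for_read(model, **hints)

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # ConnectionRouter.allow_migrate_model funnels into this hook:
        # migrations are permitted only through the admin connection.
        return db == self.admin_db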
140 api/src/backend/api/tests/test_db_utils.py Normal file
@@ -0,0 +1,140 @@
from datetime import datetime, timezone
from enum import Enum
from unittest.mock import patch

import pytest

from api.db_utils import (
    batch_delete,
    enum_to_choices,
    generate_random_token,
    one_week_from_now,
)
from api.models import Provider


class TestEnumToChoices:
    def test_enum_to_choices_simple(self):
        class Color(Enum):
            RED = 1
            GREEN = 2
            BLUE = 3

        expected_result = [
            (1, "Red"),
            (2, "Green"),
            (3, "Blue"),
        ]

        result = enum_to_choices(Color)
        assert result == expected_result

    def test_enum_to_choices_with_underscores(self):
        class Status(Enum):
            PENDING_APPROVAL = "pending"
            IN_PROGRESS = "in_progress"
            COMPLETED_SUCCESSFULLY = "completed"

        expected_result = [
            ("pending", "Pending Approval"),
            ("in_progress", "In Progress"),
            ("completed", "Completed Successfully"),
        ]

        result = enum_to_choices(Status)
        assert result == expected_result

    def test_enum_to_choices_empty_enum(self):
        class EmptyEnum(Enum):
            pass

        expected_result = []

        result = enum_to_choices(EmptyEnum)
        assert result == expected_result

    def test_enum_to_choices_numeric_values(self):
        class Numbers(Enum):
            ONE = 1
            TWO = 2
            THREE = 3

        expected_result = [
            (1, "One"),
            (2, "Two"),
            (3, "Three"),
        ]

        result = enum_to_choices(Numbers)
        assert result == expected_result


class TestOneWeekFromNow:
    def test_one_week_from_now(self):
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(2023, 1, 1, tzinfo=timezone.utc)
            expected_result = datetime(2023, 1, 8, tzinfo=timezone.utc)

            result = one_week_from_now()
            assert result == expected_result

    def test_one_week_from_now_with_timezone(self):
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(
                2023, 6, 15, 12, 0, tzinfo=timezone.utc
            )
            expected_result = datetime(2023, 6, 22, 12, 0, tzinfo=timezone.utc)

            result = one_week_from_now()
            assert result == expected_result


class TestGenerateRandomToken:
    def test_generate_random_token_default_length(self):
        token = generate_random_token()
        assert len(token) == 14

    def test_generate_random_token_custom_length(self):
        length = 20
        token = generate_random_token(length=length)
        assert len(token) == length

    def test_generate_random_token_with_symbols(self):
        symbols = "ABC123"
        token = generate_random_token(length=10, symbols=symbols)
        assert len(token) == 10
        assert all(char in symbols for char in token)

    def test_generate_random_token_unique(self):
        tokens = {generate_random_token() for _ in range(1000)}
        # Assuming that generating 1000 tokens should result in unique values
        assert len(tokens) == 1000

    def test_generate_random_token_no_symbols_provided(self):
        token = generate_random_token(length=5, symbols="")
        # Default symbols
        assert len(token) == 5


class TestBatchDelete:
    @pytest.fixture
    def create_test_providers(self, tenants_fixture):
        tenant = tenants_fixture[0]
        provider_id = 123456789012
        provider_count = 10
        for i in range(provider_count):
            Provider.objects.create(
                tenant=tenant,
                uid=f"{provider_id + i}",
                provider=Provider.ProviderChoices.AWS,
            )
        return provider_count

    @pytest.mark.django_db
    def test_batch_delete(self, tenants_fixture, create_test_providers):
        tenant_id = str(tenants_fixture[0].id)
        _, summary = batch_delete(
            tenant_id, Provider.objects.all(), batch_size=create_test_providers // 2
        )
        assert Provider.objects.all().count() == 0
        assert summary == {"api.Provider": create_test_providers}
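The helper signatures are fully constrained by these tests: enum_to_choices title-cases member names and swaps underscores for spaces, one_week_from_now adds seven days to the current UTC time (the tests patch api.db_utils.datetime, so the sketch must reach datetime.now through that module-level name), and generate_random_token defaults to 14 characters with an optional symbols alphabet, falling back to the default when symbols is empty. A sketch under those constraints, with a placeholder alphabet since the real default symbols are not visible in this diff:

import secrets
from datetime import datetime, timedelta, timezone

# Placeholder alphabet; the real default in api.db_utils is not shown here.
DEFAULT_TOKEN_SYMBOLS = "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"


def enum_to_choices(enum_class):
    # (value, "Title Cased Name") pairs, underscores turned into spaces.
    return [
        (member.value, member.name.replace("_", " ").title())
        for member in enum_class
    ]


def one_week_from_now():
    return datetime.now(timezone.utc) + timedelta(days=7)


def generate_random_token(length=14, symbols=None):
    # An empty or missing symbols argument falls back to the default alphabet,
    # matching test_generate_random_token_no_symbols_provided.
    alphabet = symbols or DEFAULT_TOKEN_SYMBOLS
    return "".join(secrets.choice(alphabet) for _ in range(length))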
36 api/src/backend/api/tests/test_decorators.py Normal file
@@ -0,0 +1,36 @@
import uuid
from unittest.mock import call, patch

import pytest

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
from api.decorators import set_tenant


@pytest.mark.django_db
class TestSetTenantDecorator:
    @patch("api.decorators.connection.cursor")
    def test_set_tenant(self, mock_cursor):
        mock_cursor.return_value.__enter__.return_value = mock_cursor

        @set_tenant
        def random_func(arg):
            return arg

        tenant_id = str(uuid.uuid4())

        result = random_func("test_arg", tenant_id=tenant_id)

        assert (
            call(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
            in mock_cursor.execute.mock_calls
        )
        assert result == "test_arg"

    def test_set_tenant_exception(self):
        @set_tenant
        def random_func(arg):
            return arg

        with pytest.raises(KeyError):
            random_func("test_arg")
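Together the two tests fix the decorator's contract: tenant_id must arrive as a keyword argument (its absence raises KeyError before the wrapped function runs), it is consumed rather than forwarded, and the tenant is installed on the current connection via SET_CONFIG_QUERY. A minimal sketch of that contract follows; the query and variable name are assumptions standing in for the real values in api.db_utils.

from functools import wraps

from django.db import connection

POSTGRES_TENANT_VAR = "api.tenant_id"  # assumed name; real value in api.db_utils
SET_CONFIG_QUERY = "SELECT set_config(%s, %s, TRUE)"  # assumed query shape


def set_tenant(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Raises KeyError when tenant_id is missing, as the second test expects.
        tenant_id = kwargs.pop("tenant_id")
        with connection.cursor() as cursor:
            cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
        return func(*args, **kwargs)

    return wrapper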
54 api/src/backend/api/tests/test_middleware.py Normal file
@@ -0,0 +1,54 @@
from unittest.mock import MagicMock, patch

import pytest
from django.http import HttpResponse
from django.test import RequestFactory

from api.middleware import APILoggingMiddleware


@pytest.mark.django_db
@patch("logging.getLogger")
def test_api_logging_middleware_logging(mock_logger):
    factory = RequestFactory()

    request = factory.get("/test-path?param1=value1&param2=value2")
    request.method = "GET"

    response = HttpResponse()
    response.status_code = 200

    get_response = MagicMock(return_value=response)

    with patch("api.middleware.extract_auth_info") as mock_extract_auth_info:
        mock_extract_auth_info.return_value = {
            "user_id": "user123",
            "tenant_id": "tenant456",
        }

        with patch("api.middleware.logging.getLogger") as mock_get_logger:
            mock_logger = MagicMock()
            mock_get_logger.return_value = mock_logger

            middleware = APILoggingMiddleware(get_response)

            with patch("api.middleware.time.time") as mock_time:
                mock_time.side_effect = [1000.0, 1001.0]  # Start time and end time

                middleware(request)

            get_response.assert_called_once_with(request)

            mock_extract_auth_info.assert_called_once_with(request)

            expected_extra = {
                "user_id": "user123",
                "tenant_id": "tenant456",
                "method": "GET",
                "path": "/test-path",
                "query_params": {"param1": "value1", "param2": "value2"},
                "status_code": 200,
                "duration": 1.0,
            }

            mock_logger.info.assert_called_once_with("", extra=expected_extra)
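The mocks make the middleware's shape easy to reconstruct. The sketch below is an illustration consistent with this test rather than the shipped api.middleware module: it assumes extract_auth_info(request) returns a dict that is merged into the log record, and that exactly two time.time() calls bracket the request, matching the side_effect above.

import logging
import time


def extract_auth_info(request):
    # Placeholder: the real helper derives user_id/tenant_id from the request
    # credentials; the test patches it out entirely.
    return {}


class APILoggingMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger(__name__)

    def __call__(self, request):
        start = time.time()  # first time.time() call (1000.0 in the test)
        response = self.get_response(request)
        self.logger.info(
            "",
            extra={
                **extract_auth_info(request),
                "method": request.method,
                "path": request.path,
                "query_params": request.GET.dict(),
                "status_code": response.status_code,
                "duration": time.time() - start,  # second call (1001.0)
            },
        )
        return response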
Some files were not shown because too many files have changed in this diff.