feat(dashboards): add new Prowler dashboards (#3575)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
6
.gitignore
vendored
@@ -9,8 +9,9 @@
|
||||
__pycache__
|
||||
venv/
|
||||
build/
|
||||
dist/
|
||||
/dist/
|
||||
*.egg-info/
|
||||
*/__pycache__/*.pyc
|
||||
|
||||
# Session
|
||||
Session.vim
|
||||
@@ -51,3 +52,6 @@ junit-reports/
|
||||
.coverage*
|
||||
.coverage
|
||||
coverage*
|
||||
|
||||
# Node
|
||||
node_modules
|
||||
|
||||
172
dashboard/__main__.py
Normal file
@@ -0,0 +1,172 @@
|
||||
# Importing Packages
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
import click
|
||||
import dash
|
||||
import dash_bootstrap_components as dbc
|
||||
from colorama import Fore, Style
|
||||
from dash import dcc, html
|
||||
from dash.dependencies import Input, Output
|
||||
|
||||
from dashboard.config import folder_path_overview
|
||||
from prowler.config.config import orange_color
|
||||
from prowler.lib.banner import print_banner
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
cli = sys.modules["flask.cli"]
|
||||
print_banner(verbose=False)
|
||||
print(
|
||||
f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
|
||||
)
|
||||
cli.show_server_banner = lambda *x: click.echo(
|
||||
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
|
||||
)
|
||||
|
||||
# Initialize the app - incorporate css
|
||||
dashboard = dash.Dash(
|
||||
__name__,
|
||||
external_stylesheets=[dbc.themes.DARKLY],
|
||||
use_pages=True,
|
||||
suppress_callback_exceptions=True,
|
||||
title="Prowler Dashboard",
|
||||
)
|
||||
|
||||
# Logo
|
||||
prowler_logo = html.Img(
|
||||
src="https://prowler.com/logo-dashboard.png", alt="Prowler Logo"
|
||||
)
|
||||
|
||||
menu_icons = {
|
||||
"overview": "/assets/images/icons/overview.svg",
|
||||
"compliance": "/assets/images/icons/compliance.svg",
|
||||
}
|
||||
|
||||
|
||||
# Function to generate navigation links
|
||||
def generate_nav_links(current_path):
|
||||
nav_links = []
|
||||
for page in dash.page_registry.values():
|
||||
# Gets the icon URL based on the page name
|
||||
icon_url = menu_icons.get(page["name"].lower())
|
||||
is_active = (
|
||||
" bg-prowler-stone-950 border-r-4 border-solid border-prowler-lime"
|
||||
if current_path == page["relative_path"]
|
||||
else ""
|
||||
)
|
||||
link_class = f"block hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime{is_active}"
|
||||
|
||||
link_content = html.Span(
|
||||
[
|
||||
html.Img(src=icon_url, className="w-5"),
|
||||
html.Span(page["name"], className="font-medium text-base leading-6"),
|
||||
],
|
||||
className="flex justify-center lg:justify-normal items-center gap-x-3 py-2 px-3",
|
||||
)
|
||||
|
||||
nav_link = html.Li(
|
||||
dcc.Link(link_content, href=page["relative_path"], className=link_class)
|
||||
)
|
||||
nav_links.append(nav_link)
|
||||
return nav_links
|
||||
|
||||
|
||||
def generate_help_menu():
|
||||
help_links = [
|
||||
{
|
||||
"title": "Help",
|
||||
"url": "https://github.com/prowler-cloud/prowler/issues",
|
||||
"icon": "/assets/images/icons/help.png",
|
||||
},
|
||||
{
|
||||
"title": "Docs",
|
||||
"url": "https://docs.prowler.com",
|
||||
"icon": "/assets/images/icons/docs.png",
|
||||
},
|
||||
]
|
||||
|
||||
link_class = "block hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime"
|
||||
|
||||
menu_items = []
|
||||
for link in help_links:
|
||||
menu_item = html.Li(
|
||||
html.A(
|
||||
html.Span(
|
||||
[
|
||||
html.Img(src=link["icon"], className="w-5"),
|
||||
html.Span(
|
||||
link["title"], className="font-medium text-base leading-6"
|
||||
),
|
||||
],
|
||||
className="flex items-center gap-x-3 py-2 px-3",
|
||||
),
|
||||
href=link["url"],
|
||||
target="_blank",
|
||||
className=link_class,
|
||||
)
|
||||
)
|
||||
menu_items.append(menu_item)
|
||||
|
||||
return menu_items
|
||||
|
||||
|
||||
# Layout
|
||||
dashboard.layout = html.Div(
|
||||
[
|
||||
dcc.Location(id="url", refresh=False),
|
||||
html.Link(rel="icon", href="assets/favicon.ico"),
|
||||
# Placeholder for dynamic navigation bar
|
||||
html.Div(
|
||||
[
|
||||
html.Div(id="navigation-bar", className="bg-prowler-stone-900"),
|
||||
html.Div(
|
||||
[
|
||||
dash.page_container,
|
||||
],
|
||||
id="content_select",
|
||||
className="bg-prowler-white w-full col-span-11 h-screen mx-auto overflow-y-scroll no-scrollbar px-10 py-7",
|
||||
),
|
||||
],
|
||||
className="grid custom-grid 2xl:custom-grid-large h-screen",
|
||||
),
|
||||
],
|
||||
className="h-screen mx-auto",
|
||||
)
|
||||
|
||||
|
||||
# Callback to update navigation bar
|
||||
@dashboard.callback(Output("navigation-bar", "children"), [Input("url", "pathname")])
|
||||
def update_nav_bar(pathname):
|
||||
return html.Div(
|
||||
[
|
||||
html.Div([prowler_logo], className="mb-8 px-3"),
|
||||
html.H6(
|
||||
"Dashboards",
|
||||
className="px-3 text-prowler-stone-500 text-sm opacity-90 font-regular mb-2",
|
||||
),
|
||||
html.Nav(
|
||||
[html.Ul(generate_nav_links(pathname), className="")],
|
||||
className="flex flex-col gap-y-6",
|
||||
),
|
||||
html.A(
|
||||
[
|
||||
html.Span(
|
||||
[
|
||||
html.Img(src="assets/favicon.ico", className="w-5"),
|
||||
"Subscribe to prowler SaaS",
|
||||
],
|
||||
className="flex items-center gap-x-3",
|
||||
),
|
||||
],
|
||||
href="https://prowler.com/",
|
||||
target="_blank",
|
||||
className="block mt-300 px-3 py-3 uppercase text-xs hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime",
|
||||
),
|
||||
html.Nav(
|
||||
[html.Ul(generate_help_menu(), className="")],
|
||||
className="flex flex-col gap-y-6 mt-auto",
|
||||
),
|
||||
],
|
||||
className="flex flex-col bg-prowler-stone-900 py-7 h-full",
|
||||
)
|
||||
BIN
dashboard/assets/favicon.ico
Normal file
|
After Width: | Height: | Size: 15 KiB |
4
dashboard/assets/images/icons/compliance.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="#FFF" aria-hidden="true" class="h-5 w-5" viewBox="0 0 24 24">
|
||||
<path fill-rule="evenodd" d="M9 1.5H5.625c-1.036 0-1.875.84-1.875 1.875v17.25c0 1.035.84 1.875 1.875 1.875h12.75c1.035 0 1.875-.84 1.875-1.875V12.75A3.75 3.75 0 0 0 16.5 9h-1.875a1.875 1.875 0 0 1-1.875-1.875V5.25A3.75 3.75 0 0 0 9 1.5zm6.61 10.936a.75.75 0 1 0-1.22-.872l-3.236 4.53L9.53 14.47a.75.75 0 0 0-1.06 1.06l2.25 2.25a.75.75 0 0 0 1.14-.094l3.75-5.25z" clip-rule="evenodd"/>
|
||||
<path d="M12.971 1.816A5.23 5.23 0 0 1 14.25 5.25v1.875c0 .207.168.375.375.375H16.5a5.23 5.23 0 0 1 3.434 1.279 9.768 9.768 0 0 0-6.963-6.963z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 650 B |
BIN
dashboard/assets/images/icons/docs.png
Normal file
|
After Width: | Height: | Size: 734 B |
BIN
dashboard/assets/images/icons/help-black.png
Normal file
|
After Width: | Height: | Size: 441 B |
BIN
dashboard/assets/images/icons/help.png
Normal file
|
After Width: | Height: | Size: 934 B |
4
dashboard/assets/images/icons/overview.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="#FFF" aria-hidden="true" class="h-5 w-5" viewBox="0 0 24 24">
|
||||
<path fill-rule="evenodd" d="M2.25 13.5a8.25 8.25 0 0 1 8.25-8.25.75.75 0 0 1 .75.75v6.75H18a.75.75 0 0 1 .75.75 8.25 8.25 0 0 1-16.5 0z" clip-rule="evenodd"/>
|
||||
<path fill-rule="evenodd" d="M12.75 3a.75.75 0 0 1 .75-.75 8.25 8.25 0 0 1 8.25 8.25.75.75 0 0 1-.75.75h-7.5a.75.75 0 0 1-.75-.75V3z" clip-rule="evenodd"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 435 B |
BIN
dashboard/assets/images/providers/aws_provider.png
Normal file
|
After Width: | Height: | Size: 10 KiB |
BIN
dashboard/assets/images/providers/azure_provider.png
Normal file
|
After Width: | Height: | Size: 6.0 KiB |
BIN
dashboard/assets/images/providers/gcp_provider.png
Normal file
|
After Width: | Height: | Size: 245 KiB |
BIN
dashboard/assets/images/providers/k8s_provider.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
dashboard/assets/logo.png
Normal file
|
After Width: | Height: | Size: 11 KiB |
1371
dashboard/assets/styles/dist/output.css
vendored
Normal file
2221
dashboard/common_methods.py
Normal file
23
dashboard/compliance/aws_account_security_onboarding_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
@@ -0,0 +1,22 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_NAME",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_NAME", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_NAME",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ATTRIBUTES_NAME"
|
||||
)
|
||||
24
dashboard/compliance/cis_1_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_1_5_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_1_8_kubernetes.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_0_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_0_azure.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_0_gcp.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_1_azure.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_3_0_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
23
dashboard/compliance/cisa_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
29
dashboard/compliance/ens_rd2022_aws.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_ens
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_MARCO",
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORIA",
|
||||
"REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL",
|
||||
"REQUIREMENTS_ATTRIBUTES_TIPO",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_ens(
|
||||
aux,
|
||||
"REQUIREMENTS_ATTRIBUTES_MARCO",
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORIA",
|
||||
"REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL",
|
||||
"REQUIREMENTS_ATTRIBUTES_TIPO",
|
||||
)
|
||||
24
dashboard/compliance/fedramp_low_revision_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/fedramp_moderate_revision_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/ffiec_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
23
dashboard/compliance/gdpr_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/gxp_21_cfr_part_11_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
23
dashboard/compliance/gxp_eu_annex_11_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/hipaa_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
23
dashboard/compliance/iso27001_2013_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_container_iso
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORY",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_NAME",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_container_iso(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_CATEGORY", "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID"
|
||||
)
|
||||
23
dashboard/compliance/mitre_attack_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_SUBTECHNIQUES",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
|
||||
)
|
||||
24
dashboard/compliance/nist_800_171_revision_2_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/nist_800_53_revision_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/nist_800_53_revision_5_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/nist_csf_1_1_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
20
dashboard/compliance/pci_3_2_1_aws.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_pci
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_pci(aux, "REQUIREMENTS_ID")
|
||||
20
dashboard/compliance/rbi_cyber_security_framework_aws.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_rbi
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_containers_rbi(aux, "REQUIREMENTS_ID")
|
||||
24
dashboard/compliance/soc2_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
11
dashboard/config.py
Normal file
@@ -0,0 +1,11 @@
|
||||
import os
|
||||
|
||||
# Emojis to be used in the compliance table
|
||||
pass_emoji = "✅"
|
||||
fail_emoji = "❌"
|
||||
info_emoji = "ℹ️"
|
||||
manual_emoji = "✋🏽"
|
||||
|
||||
# Folder output path
|
||||
folder_path_overview = os.getcwd() + "/output"
|
||||
folder_path_compliance = os.getcwd() + "/output/compliance"
|
||||
157
dashboard/lib/cards.py
Normal file
@@ -0,0 +1,157 @@
|
||||
from typing import List
|
||||
|
||||
from dash import html
|
||||
|
||||
|
||||
def create_provider_card(
|
||||
provider: str, provider_logo: str, account_type: str, filtered_data
|
||||
) -> List[html.Div]:
|
||||
"""
|
||||
Card to display the provider's name and icon.
|
||||
Args:
|
||||
provider (str): Name of the provider.
|
||||
provider_icon (str): Icon of the provider.
|
||||
Returns:
|
||||
html.Div: Card to display the provider's name and icon.
|
||||
"""
|
||||
accounts = len(
|
||||
filtered_data[filtered_data["PROVIDER"] == provider]["ACCOUNT_UID"].unique()
|
||||
)
|
||||
checks_executed = len(
|
||||
filtered_data[filtered_data["PROVIDER"] == provider]["CHECK_ID"].unique()
|
||||
)
|
||||
fails = len(
|
||||
filtered_data[
|
||||
(filtered_data["PROVIDER"] == provider)
|
||||
& (filtered_data["STATUS"] == "FAIL")
|
||||
]
|
||||
)
|
||||
passes = len(
|
||||
filtered_data[
|
||||
(filtered_data["PROVIDER"] == provider)
|
||||
& (filtered_data["STATUS"] == "PASS")
|
||||
]
|
||||
)
|
||||
# Take the values in the MUTED colum that are true for the provider
|
||||
if "MUTED" in filtered_data.columns:
|
||||
muted = len(
|
||||
filtered_data[
|
||||
(filtered_data["PROVIDER"] == provider)
|
||||
& (filtered_data["MUTED"] == "True")
|
||||
]
|
||||
)
|
||||
else:
|
||||
muted = 0
|
||||
|
||||
return [
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Div([provider_logo], className="w-8"),
|
||||
],
|
||||
className="p-2 shadow-box-up rounded-full",
|
||||
),
|
||||
html.H5(
|
||||
f"{provider.upper()} {account_type}",
|
||||
className="text-base font-semibold leading-snug tracking-normal text-gray-900",
|
||||
),
|
||||
],
|
||||
className="flex justify-between items-center mb-3",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
account_type,
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
accounts,
|
||||
className="inline-block text-xs text-prowler-stone-900 font-bold shadow-box-down px-4 py-1 rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"Checks",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
checks_executed,
|
||||
className="inline-block text-xs text-prowler-stone-900 font-bold shadow-box-down px-4 py-1 rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"FAILED",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
fails,
|
||||
className="m-[2px] px-4 py-1 rounded-lg bg-gradient-failed",
|
||||
),
|
||||
],
|
||||
className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"PASSED",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
passes,
|
||||
className="m-[2px] px-4 py-1 rounded-lg bg-gradient-passed",
|
||||
),
|
||||
],
|
||||
className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"MUTED",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
muted,
|
||||
className="m-[2px] px-4 py-1 rounded-lg bg-gradient-muted",
|
||||
),
|
||||
],
|
||||
className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
],
|
||||
className="grid gap-x-8 gap-y-4",
|
||||
),
|
||||
],
|
||||
className="px-4 py-3",
|
||||
),
|
||||
],
|
||||
className="relative flex flex-col bg-white shadow-provider rounded-xl w-full transition ease-in-out delay-100 hover:-translate-y-1 hover:scale-110 hover:z-50 hover:cursor-pointer",
|
||||
)
|
||||
]
|
||||
195
dashboard/lib/dropdowns.py
Normal file
@@ -0,0 +1,195 @@
|
||||
from dash import dcc, html
|
||||
|
||||
|
||||
def create_date_dropdown(assesment_times: list) -> html.Div:
    """
    Build the dropdown used to pick the assessment date (last available scan).

    Args:
        assesment_times (list): Dates of the last available scan for each account.

    Returns:
        html.Div: Label with a help tooltip plus a single-select date dropdown.
    """
    return html.Div(
        [
            html.Div(
                [
                    html.Label(
                        "Assessment date (last available scan) ",
                        className="text-prowler-stone-900 font-bold text-sm",
                    ),
                    html.Img(
                        id="info-file-over",
                        src="/assets/images/icons/help-black.png",
                        className="w-5",
                        title="The date of the last available scan for each account is displayed here. If you have not run prowler yet, the date will be empty.",
                    ),
                ],
                style={"display": "inline-flex"},
            ),
            dcc.Dropdown(
                id="report-date-filter",
                options=[
                    {"label": account, "value": account} for account in assesment_times
                ],
                # Guard against an empty list so the page does not crash with
                # an IndexError before any scan results exist.
                value=assesment_times[0] if assesment_times else None,
                clearable=False,
                multi=False,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )
|
||||
|
||||
|
||||
def create_date_dropdown_compliance(assesment_times: list) -> html.Div:
    """
    Build the assessment-date selector for the compliance page.

    Args:
        assesment_times (list): Dates of the last available scan for each account.

    Returns:
        html.Div: Label plus a single-select date dropdown.
    """
    return html.Div(
        [
            html.Label(
                # Fixed typo in the visible label ("Assesment" -> "Assessment").
                "Assessment Date:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="date-filter-analytics",
                options=[
                    {"label": account, "value": account} for account in assesment_times
                ],
                # Guard against an empty list so the page does not crash with
                # an IndexError before any scan results exist.
                value=assesment_times[0] if assesment_times else None,
                clearable=False,
                multi=False,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )
|
||||
|
||||
|
||||
def create_region_dropdown(regions: list) -> html.Div:
    """
    Build the region filter for the overview page.

    Args:
        regions (list): Region names to offer as options.

    Returns:
        html.Div: Label plus a multi-select region dropdown (defaults to "All").
    """
    region_options = [{"label": region, "value": region} for region in regions]
    label = html.Label(
        "Region:",
        className="text-prowler-stone-900 font-bold text-sm",
    )
    dropdown = dcc.Dropdown(
        id="region-filter",
        options=region_options,
        value=["All"],  # start with every region selected
        clearable=False,
        multi=True,
        style={"color": "#000000", "width": "100%"},
    )
    return html.Div([label, dropdown])
|
||||
|
||||
|
||||
def create_region_dropdown_compliance(regions: list) -> html.Div:
    """
    Build the region filter for the compliance page.

    Args:
        regions (list): Region names to offer as options.

    Returns:
        html.Div: Label plus a multi-select region dropdown (defaults to "All").
    """
    region_options = [{"label": region, "value": region} for region in regions]
    label = html.Label(
        "Region:",
        className="text-prowler-stone-900 font-bold text-sm",
    )
    dropdown = dcc.Dropdown(
        id="region-filter-compliance",
        options=region_options,
        value=["All"],  # start with every region selected
        clearable=False,
        multi=True,
        style={"color": "#000000", "width": "100%"},
    )
    return html.Div([label, dropdown])
|
||||
|
||||
|
||||
def create_account_dropdown(accounts: list) -> html.Div:
    """
    Build the account filter for the overview page.

    Args:
        accounts (list): Account identifiers to offer as options.

    Returns:
        html.Div: Label plus a multi-select account dropdown (defaults to "All").
    """
    account_options = [{"label": account, "value": account} for account in accounts]
    label = html.Label(
        "Account:",
        className="text-prowler-stone-900 font-bold text-sm",
    )
    dropdown = dcc.Dropdown(
        id="cloud-account-filter",
        options=account_options,
        value=["All"],  # start with every account selected
        clearable=False,
        multi=True,
        style={"color": "#000000", "width": "100%"},
    )
    return html.Div([label, dropdown])
|
||||
|
||||
|
||||
def create_account_dropdown_compliance(accounts: list) -> html.Div:
    """
    Build the account filter for the compliance page.

    Args:
        accounts (list): Account identifiers to offer as options.

    Returns:
        html.Div: Label plus a multi-select account dropdown (defaults to "All").
    """
    account_options = [{"label": account, "value": account} for account in accounts]
    label = html.Label(
        "Account:",
        className="text-prowler-stone-900 font-bold text-sm",
    )
    dropdown = dcc.Dropdown(
        id="cloud-account-filter-compliance",
        options=account_options,
        value=["All"],  # start with every account selected
        clearable=False,
        multi=True,
        style={"color": "#000000", "width": "100%"},
    )
    return html.Div([label, dropdown])
|
||||
|
||||
|
||||
def create_compliance_dropdown(compliance: list) -> html.Div:
    """
    Build the compliance-framework selector.

    Args:
        compliance (list): Available compliance framework names.

    Returns:
        html.Div: Label plus a single-select compliance dropdown.
    """
    return html.Div(
        [
            html.Label(
                "Compliance:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="report-compliance-filter",
                options=[{"label": i, "value": i} for i in compliance],
                # Guard against an empty list so the page does not crash with
                # an IndexError when no compliance files are available.
                value=compliance[0] if compliance else None,
                clearable=False,
                style={"color": "#000000"},
            ),
        ],
    )
|
||||
157
dashboard/lib/layouts.py
Normal file
@@ -0,0 +1,157 @@
|
||||
from dash import dcc, html
|
||||
|
||||
|
||||
def create_layout_overview(
    account_dropdown: html.Div,
    date_dropdown: html.Div,
    region_dropdown: html.Div,
    download_button: html.Button,
) -> html.Div:
    """
    Assemble the overview page layout.

    Args:
        account_dropdown (html.Div): Account filter dropdown.
        date_dropdown (html.Div): Assessment-date filter dropdown.
        region_dropdown (html.Div): Region filter dropdown.
        download_button (html.Button): Button that triggers the findings download.

    Returns:
        html.Div: Full page layout for the scan overview.
    """
    header = html.Div(
        [
            html.H1(
                "Scan Overview",
                className="text-prowler-stone-900 text-2xxl font-bold",
            ),
            html.Div(className="d-flex flex-wrap", id="subscribe_card"),
        ],
        className="flex justify-between border-b border-prowler-500 pb-3",
    )
    filters = html.Div(
        [
            html.Div([date_dropdown], className=""),
            html.Div([account_dropdown], className=""),
            html.Div([region_dropdown], className=""),
        ],
        className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
    )
    # Per-provider summary cards; their content is filled in by callbacks.
    provider_cards = html.Div(
        [
            html.Div(className="flex", id="aws_card"),
            html.Div(className="flex", id="azure_card"),
            html.Div(className="flex", id="gcp_card"),
            html.Div(className="flex", id="k8s_card"),
        ],
        className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
    )
    severity_title = html.H4(
        "Count of Failed Findings by severity",
        className="text-prowler-stone-900 text-lg font-bold",
    )
    graphs = html.Div(
        [
            html.Div(
                className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-3 gap-y-4",
                id="status_graph",
            ),
            html.Div(
                className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-3 gap-y-4",
                id="two_pie_chart",
            ),
            html.Div(
                className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-6 col-end-13 gap-y-4",
                id="line_plot",
            ),
        ],
        className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
    )
    table_header = html.Div(
        [
            html.H4(
                "Top 25 Failed Findings by Severity",
                className="text-prowler-stone-900 text-lg font-bold",
            ),
            download_button,
            dcc.Download(id="download-data"),
        ],
        className="flex justify-between items-center",
    )
    # Invisible tracking pixel for anonymous page-view analytics (Matomo).
    tracking_pixel = html.Img(
        referrerPolicy="no-referrer-when-downgrade",
        src="https://prowlerpro.matomo.cloud/matomo.php?idsite=2&rec=1&action_name=dashboard-overview",
        alt="",
    )
    return html.Div(
        [
            dcc.Location(id="url", refresh=False),
            header,
            filters,
            provider_cards,
            severity_title,
            graphs,
            table_header,
            html.Div(id="table", className="grid"),
            tracking_pixel,
        ],
        className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
    )
|
||||
|
||||
|
||||
def create_layout_compliance(
    account_dropdown: html.Div,
    date_dropdown: html.Div,
    region_dropdown: html.Div,
    compliance_dropdown: html.Div,
) -> html.Div:
    """
    Assemble the compliance page layout.

    Args:
        account_dropdown (html.Div): Account filter dropdown.
        date_dropdown (html.Div): Assessment-date filter dropdown.
        region_dropdown (html.Div): Region filter dropdown.
        compliance_dropdown (html.Div): Compliance framework dropdown.

    Returns:
        html.Div: Full page layout for the compliance view.
    """
    subscribe_link = html.A(
        [
            html.Img(src="assets/favicon.ico", className="w-5 mr-3"),
            html.Span("Subscribe to prowler SaaS"),
        ],
        href="https://prowler.pro/",
        target="_blank",
        className="text-prowler-stone-900 inline-flex px-4 py-2 text-xs font-bold uppercase transition-all rounded-lg text-gray-900 hover:bg-prowler-stone-900/10 border-solid border-1 hover:border-prowler-stone-900/10 hover:border-solid hover:border-1 border-prowler-stone-900/10",
    )
    header = html.Div(
        [
            html.H1(
                "Compliance",
                className="text-prowler-stone-900 text-2xxl font-bold",
            ),
            subscribe_link,
        ],
        className="flex justify-between border-b border-prowler-500 pb-3",
    )
    filters = html.Div(
        [
            html.Div([date_dropdown], className=""),
            html.Div([account_dropdown], className=""),
            html.Div([region_dropdown], className=""),
            html.Div([compliance_dropdown], className=""),
        ],
        className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
    )
    # Graph containers; their content is filled in by callbacks.
    graphs = html.Div(
        [
            html.Div(
                className="flex flex-col col-span-12 md:col-span-4 gap-y-4",
                id="overall_status_result_graph",
            ),
            html.Div(
                className="flex flex-col col-span-12 md:col-span-7 md:col-end-13 gap-y-4",
                id="security_level_graph",
            ),
            html.Div(
                className="flex flex-col col-span-12 md:col-span-2 gap-y-4",
                id="",
            ),
        ],
        className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
    )
    # Invisible tracking pixel for anonymous page-view analytics (Matomo).
    tracking_pixel = html.Img(
        referrerPolicy="no-referrer-when-downgrade",
        src="https://prowlerpro.matomo.cloud/matomo.php?idsite=2&rec=1&action_name=dashboard-compliance",
        alt="",
    )
    return html.Div(
        [
            dcc.Location(id="url", refresh=False),
            header,
            filters,
            graphs,
            html.H4(
                "Details compliance:",
                className="text-prowler-stone-900 text-lg font-bold",
            ),
            html.Div(className="flex flex-wrap", id="output"),
            tracking_pixel,
        ],
        className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
    )
|
||||
585
dashboard/pages/compliance.py
Normal file
@@ -0,0 +1,585 @@
|
||||
# Standard library imports
|
||||
import csv
|
||||
import glob
|
||||
import importlib
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
|
||||
# Third-party imports
|
||||
import dash
|
||||
import pandas as pd
|
||||
import plotly.express as px
|
||||
from dash import callback, dcc, html
|
||||
from dash.dependencies import Input, Output
|
||||
|
||||
# Config import
|
||||
from dashboard.config import folder_path_compliance
|
||||
from dashboard.lib.dropdowns import (
|
||||
create_account_dropdown_compliance,
|
||||
create_compliance_dropdown,
|
||||
create_date_dropdown_compliance,
|
||||
create_region_dropdown_compliance,
|
||||
)
|
||||
from dashboard.lib.layouts import create_layout_compliance
|
||||
|
||||
# Suppress warnings
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
# Global variables
|
||||
# TODO: Create a flag to let the user put a custom path
|
||||
|
||||
# Collect only compliance CSVs that contain at least one data row
# besides the header line.
csv_files = []
for csv_path in glob.glob(os.path.join(folder_path_compliance, "*.csv")):
    with open(csv_path, "r", newline="") as handle:
        row_count = sum(1 for _ in csv.reader(handle))
    if row_count > 1:
        csv_files.append(csv_path)
|
||||
|
||||
|
||||
def load_csv_files(csv_files):
    """
    Load compliance CSV files into one DataFrame and derive the list of
    compliance framework names from the file names.

    Args:
        csv_files (list): Paths of candidate CSV files.

    Returns:
        tuple: (DataFrame or None when nothing could be loaded,
        sorted list of framework display names).
    """
    frames = []
    names = []
    for path in csv_files:
        frame = pd.read_csv(path, sep=";", on_bad_lines="skip")
        # Only compliance outputs carry a CHECKID column.
        if "CHECKID" not in frame.columns:
            continue
        frames.append(frame)
        # Derive the framework name from the file name: drop everything up
        # to the first "_", drop the ".csv" suffix, upper-case the rest.
        name = path.split("/")[-1]
        name = re.sub(r"^.*?_", "", name)
        name = name.replace(".csv", "")
        name = name.upper()
        if "AWS" in name:
            if "AWS_" in name:
                name = name.replace("_AWS", "")
            else:
                name = name.replace("_AWS", " - AWS")
        if "GCP" in name:
            name = name.replace("_GCP", " - GCP")
        if "AZURE" in name:
            name = name.replace("_AZURE", " - AZURE")
        if "KUBERNETES" in name:
            name = name.replace("_KUBERNETES", " - KUBERNETES")
        names.append(name)

    results = list(set(names))
    # Every CIS framework is offered twice, once per profile level.
    plain = results.copy()
    leveled = []
    for compliance_name in results:
        if "CIS_" in compliance_name:
            plain.remove(compliance_name)
            leveled.append(compliance_name + " - Level_1")
            leveled.append(compliance_name + " - Level_2")
    results = plain + leveled
    results.sort()

    # pd.concat raises ValueError on an empty list -> no data available.
    try:
        data = pd.concat(frames, ignore_index=True)
    except ValueError:
        data = None
    return data, results
|
||||
|
||||
|
||||
data, results = load_csv_files(csv_files)

if data is None:
    # No usable compliance CSVs: register the page with a placeholder.
    dash.register_page(__name__)
    layout = html.Div(
        [
            html.Div(
                [
                    html.H5(
                        "No data found, check if the CSV files are in the correct folder.",
                        className="card-title",
                        style={"text-align": "left"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )
        ]
    )
else:
    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"])
    data["ASSESSMENT_TIME"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")

    # Keep only the latest timestamp for each calendar day.
    data_values = data["ASSESSMENT_TIME"].unique()
    data_values.sort()
    data_values = data_values[::-1]
    seen_days = set()
    latest_per_day = []
    for value in data_values:
        day = value.split(" ")[0]
        if day not in seen_days:
            seen_days.add(day)
            latest_per_day.append(value)
    data_values = latest_per_day

    data = data[data["ASSESSMENT_TIME"].isin(data_values)]
    data["ASSESSMENT_TIME"] = data["ASSESSMENT_TIME"].apply(lambda x: x.split(" ")[0])

    # Select Compliance - Dropdown
    compliance_dropdown = create_compliance_dropdown(results)

    # Select Account - Dropdown: gather the identifiers of every provider
    # column that is present (ACCOUNTID, PROJECTID, SUBSCRIPTIONID, SUBSCRIPTION).
    select_account_dropdown_list = ["All"]
    if "ACCOUNTID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["ACCOUNTID"].unique()
        )
    if "PROJECTID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["PROJECTID"].unique()
        )
    if "SUBSCRIPTIONID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["SUBSCRIPTIONID"].unique()
        )
    if "SUBSCRIPTION" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["SUBSCRIPTION"].unique()
        )

    # Drop non-string entries and "nan" strings.
    list_items = [
        item
        for item in select_account_dropdown_list
        if item.__class__.__name__ == "str" and "nan" not in item
    ]

    account_dropdown = create_account_dropdown_compliance(list_items)

    # Select Region - Dropdown
    select_region_dropdown_list = ["All"]
    if "REGION" in data.columns:
        # Handle the case where the column REGION is empty
        data["REGION"] = data["REGION"].fillna("-")
        select_region_dropdown_list = select_region_dropdown_list + list(
            data["REGION"].unique()
        )
    if "LOCATION" in data.columns:
        # Handle the case where the column LOCATION is empty
        data["LOCATION"] = data["LOCATION"].fillna("-")
        select_region_dropdown_list = select_region_dropdown_list + list(
            data["LOCATION"].unique()
        )

    # Clear the list from None and NaN values
    list_items = [
        item
        for item in select_region_dropdown_list
        if item.__class__.__name__ == "str"
    ]

    region_dropdown = create_region_dropdown_compliance(list_items)

    # Select Date - Dropdown
    date_dropdown = create_date_dropdown_compliance(
        list(data["ASSESSMENT_TIME"].unique())
    )

    dash.register_page(__name__)

    layout = create_layout_compliance(
        account_dropdown, date_dropdown, region_dropdown, compliance_dropdown
    )
|
||||
|
||||
|
||||
@callback(
    [
        Output("output", "children"),
        Output("overall_status_result_graph", "children"),
        Output("security_level_graph", "children"),
        Output("cloud-account-filter-compliance", "value"),
        Output("cloud-account-filter-compliance", "options"),
        Output("region-filter-compliance", "value"),
        Output("region-filter-compliance", "options"),
        Output("date-filter-analytics", "value"),
        Output("date-filter-analytics", "options"),
    ],
    Input("report-compliance-filter", "value"),
    Input("cloud-account-filter-compliance", "value"),
    Input("region-filter-compliance", "value"),
    Input("date-filter-analytics", "value"),
)
def display_data(
    analytics_input, account_filter, region_filter_analytics, date_filter_analytics
):
    """
    Render the compliance page for the selected framework, accounts,
    regions and assessment date.

    Args:
        analytics_input: Selected compliance framework (display name).
        account_filter: Selected account ids, or ["All"].
        region_filter_analytics: Selected regions, or ["All"].
        date_filter_analytics: Selected assessment date.

    Returns:
        tuple: Table, two graph sections, and the refreshed values/options
        of the account, region and date filters.
    """
    current_compliance = analytics_input
    # Normalize the display name to the on-disk file naming convention.
    analytics_input = analytics_input.replace(" - ", "_")
    analytics_input = analytics_input.lower()

    # Check if the compliance selected is the level 1 or level 2 of the CIS
    is_level_1 = "level_1" in analytics_input
    analytics_input = analytics_input.replace("_level_1", "").replace("_level_2", "")

    # Filter the data based on the compliance selected
    files = [file for file in csv_files if analytics_input in file]

    def load_csv_files(files):
        """Load CSV files into a single pandas DataFrame (all values as str)."""
        dfs = []
        for file in files:
            df = pd.read_csv(file, sep=";", on_bad_lines="skip")
            dfs.append(df.astype(str))
        return pd.concat(dfs, ignore_index=True)

    data = load_csv_files(files)

    # Rename the column LOCATION to REGION for GCP or Azure
    if "gcp" in analytics_input or "azure" in analytics_input:
        data = data.rename(columns={"LOCATION": "REGION"})

    # Map kubernetes context/namespace onto the generic account/region columns
    if "kubernetes" in analytics_input:
        data.rename(columns={"CONTEXT": "ACCOUNTID"}, inplace=True)
        data.rename(columns={"NAMESPACE": "REGION"}, inplace=True)

    if "REQUIREMENTS_ATTRIBUTES_PROFILE" in data.columns:
        # Keep only the profile name, e.g. "Level 1 - ..." -> "Level 1".
        data["REQUIREMENTS_ATTRIBUTES_PROFILE"] = data[
            "REQUIREMENTS_ATTRIBUTES_PROFILE"
        ].apply(lambda x: x.split(" - ")[0])
        # Filter the chosen level of the CIS
        if is_level_1:
            data = data[data["REQUIREMENTS_ATTRIBUTES_PROFILE"] == "Level 1"]

    # Rename the column PROJECTID to ACCOUNTID for GCP
    if data.columns.str.contains("PROJECTID").any():
        data.rename(columns={"PROJECTID": "ACCOUNTID"}, inplace=True)

    # Rename the column SUBSCRIPTIONID to ACCOUNTID for Azure
    if data.columns.str.contains("SUBSCRIPTIONID").any():
        data.rename(columns={"SUBSCRIPTIONID": "ACCOUNTID"}, inplace=True)
    # Handle v3 azure cis compliance
    if data.columns.str.contains("SUBSCRIPTION").any():
        data.rename(columns={"SUBSCRIPTION": "ACCOUNTID"}, inplace=True)
        data["REGION"] = "-"

    # Filter ACCOUNT
    if account_filter == ["All"]:
        updated_cloud_account_values = data["ACCOUNTID"].unique()
    elif "All" in account_filter and len(account_filter) > 1:
        # Remove 'All' from the list
        account_filter.remove("All")
        updated_cloud_account_values = account_filter
    elif len(account_filter) == 0:
        updated_cloud_account_values = data["ACCOUNTID"].unique()
        account_filter = ["All"]
    else:
        updated_cloud_account_values = account_filter

    data = data[data["ACCOUNTID"].isin(updated_cloud_account_values)]

    account_filter_options = list(data["ACCOUNTID"].unique())
    account_filter_options = account_filter_options + ["All"]
    # FIX: the original mutated the list while iterating it (skips elements)
    # and tested `"nan" in item` before checking the type (TypeError for
    # non-str items). Filter with a comprehension, type check first.
    account_filter_options = [
        item
        for item in account_filter_options
        if item.__class__.__name__ == "str" and "nan" not in item
    ]

    # Filter REGION
    if region_filter_analytics == ["All"]:
        updated_region_account_values = data["REGION"].unique()
    elif "All" in region_filter_analytics and len(region_filter_analytics) > 1:
        # Remove 'All' from the list
        region_filter_analytics.remove("All")
        updated_region_account_values = region_filter_analytics
    elif len(region_filter_analytics) == 0:
        updated_region_account_values = data["REGION"].unique()
        region_filter_analytics = ["All"]
    else:
        updated_region_account_values = region_filter_analytics

    data = data[data["REGION"].isin(updated_region_account_values)]

    region_filter_options = list(data["REGION"].unique())
    region_filter_options = region_filter_options + ["All"]
    # FIX: same mutate-while-iterating bug as above; filter instead.
    region_filter_options = [
        item
        for item in region_filter_options
        if item.__class__.__name__ == "str" and item != "nan"
    ]

    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"], errors="coerce")
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")

    # Choosing the date that is the most recent
    data_values = data["ASSESSMENTDATE"].unique()
    data_values.sort()
    data_values = data_values[::-1]
    aux = []

    data_values = [str(i) for i in data_values]
    for value in data_values:
        if value.split(" ")[0] not in [aux[i].split(" ")[0] for i in range(len(aux))]:
            aux.append(value)
    data_values = [str(i) for i in aux]

    data = data[data["ASSESSMENTDATE"].isin(data_values)]
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].apply(lambda x: x.split(" ")[0])

    options_date = data["ASSESSMENTDATE"].unique()
    options_date.sort()
    options_date = options_date[::-1]

    # Filter DATE: fall back to the most recent date when the selected one
    # is not available for this framework.
    if date_filter_analytics in options_date:
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]
    else:
        date_filter_analytics = options_date[0]
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]

    if data.empty:
        fig = px.pie()
        pie_1 = dcc.Graph(
            figure=fig,
            config={"displayModeBar": False},
            style={"height": "250px", "width": "250px", "right": "0px"},
        )

        # NOTE(review): this branch returns a single child although the
        # callback declares 9 outputs -- confirm intended behavior.
        return [
            html.Div(
                [
                    html.H5(
                        "No data found for this compliance",
                        className="card-title",
                        style={"text-align": "left"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )
        ]
    else:
        # Check cases where the compliance start with AWS_
        if "aws_" in analytics_input:
            analytics_input = analytics_input + "_aws"
        try:
            # Each framework has its own table builder module under
            # dashboard.compliance.<framework>.
            current = analytics_input.replace(".", "_")
            compliance_module = importlib.import_module(
                f"dashboard.compliance.{current}"
            )
            data.drop_duplicates(keep="first", inplace=True)
            table = compliance_module.get_table(data)
        except ModuleNotFoundError:
            table = html.Div(
                [
                    html.H5(
                        "No data found for this compliance",
                        className="card-title",
                        style={"text-align": "left", "color": "black"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )

        df = data.copy()
        df = df.groupby(["STATUS"]).size().reset_index(name="counts")
        df = df.sort_values(by=["counts"], ascending=False)

        # Pie 1
        pie_1 = get_pie(df)

        # Get the pie2 depending on the compliance
        df = data.copy()

        current_filter = ""

        if "pci" in analytics_input:
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ID")
            current_filter = "req_id"
        elif (
            "REQUIREMENTS_ATTRIBUTES_SECTION" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_SECTION"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_SECTION")
            current_filter = "sections"
        elif (
            "REQUIREMENTS_ATTRIBUTES_CATEGORIA" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_CATEGORIA"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_CATEGORIA")
            current_filter = "categorias"
        elif (
            "REQUIREMENTS_ATTRIBUTES_CATEGORY" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_CATEGORY"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_CATEGORY")
            current_filter = "categories"
        elif (
            "REQUIREMENTS_ATTRIBUTES_SERVICE" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_SERVICE"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_SERVICE")
            current_filter = "services"
        else:
            # No groupable attribute column: show an empty placeholder chart.
            fig = px.pie()
            fig.update_layout(
                margin=dict(l=0, r=0, t=0, b=0),
                autosize=True,
                showlegend=False,
                paper_bgcolor="#303030",
            )
            pie_2 = dcc.Graph(
                figure=fig,
                config={"displayModeBar": False},
                style={"height": "250px", "width": "250px", "right": "0px"},
            )
            current_filter = "none"

        # Analytics table

        if not analytics_input:
            analytics_input = ""

        table_output = get_table(current_compliance, table)

        overall_status_result_graph = get_graph(pie_1, "Overall Status Result")

        security_level_graph = get_graph(
            pie_2, f"Top 5 failed {current_filter} by findings"
        )

        return (
            table_output,
            overall_status_result_graph,
            security_level_graph,
            account_filter,
            account_filter_options,
            region_filter_analytics,
            region_filter_options,
            date_filter_analytics,
            options_date,
        )
|
||||
|
||||
|
||||
def get_graph(pie, title):
    """Wrap a graph component with a centered uppercase title."""
    heading = html.Span(
        title,
        className="text-center text-prowler-stone-900 uppercase text-xs font-bold",
    )
    body = html.Div(
        [pie],
        className="",
        style={
            "display": "flex",
            "justify-content": "center",
            "align-items": "center",
            "margin-top": "7%",
        },
    )
    return [heading, body]
|
||||
|
||||
|
||||
def get_bar_graph(df, column_name):
    """Horizontal bar chart of the top 5 failing values of *column_name*."""
    failed = df[df["STATUS"] == "FAIL"]
    failed = failed.groupby([column_name, "STATUS"]).size().reset_index(name="counts")
    failed = failed.sort_values(by=["counts"], ascending=True)
    # take the top 5
    failed = failed.tail(5)

    labels = failed[column_name].unique()

    # Truncate long labels so the y-axis stays readable.
    for idx in range(len(labels)):
        if len(labels[idx]) > 15:
            labels[idx] = labels[idx][:15] + "..."

    fig = px.bar(
        failed,
        x="counts",
        y=labels,
        color="STATUS",
        color_discrete_map={"FAIL": "#e67272"},
        orientation="h",
    )

    fig.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=True,
        showlegend=False,
        xaxis_title=None,
        yaxis_title=None,
        font=dict(size=14, color="#292524"),
        hoverlabel=dict(font_size=12),
        paper_bgcolor="#FFF",
    )

    return dcc.Graph(
        figure=fig,
        config={"displayModeBar": False},
        style={"height": "20rem", "width": "40rem"},
    )
|
||||
|
||||
|
||||
def get_pie(df):
    """Donut chart of finding counts per STATUS, using fixed brand colors."""
    # Fixed color per status so charts stay consistent across pages.
    status_colors = {
        "FAIL": "#e67272",
        "PASS": "#54d283",
        "INFO": "#2684FF",
        "WARN": "#260000",
        "MANUAL": "#636c78",
    }

    figure = px.pie(
        df,
        names="STATUS",
        values="counts",
        hole=0.7,
        color="STATUS",
        color_discrete_map=status_colors,
    )
    figure.update_traces(
        hovertemplate=None,
        textposition="outside",
        textinfo="percent+label",
        rotation=50,
    )
    figure.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=True,
        showlegend=False,
        font=dict(size=14, color="#292524"),
        hoverlabel=dict(font_size=12),
        paper_bgcolor="#FFF",
    )

    return dcc.Graph(
        figure=figure,
        config={"displayModeBar": False},
        style={"height": "20rem", "width": "20rem"},
    )
|
||||
|
||||
|
||||
def get_table(current_compliance, table):
    """Wrap the per-framework table in a titled white card."""
    card = html.Div(
        [
            html.H5(
                f"{current_compliance}",
                className="text-prowler-stone-900 text-md font-bold uppercase mb-4",
            ),
            table,
        ],
        className="relative flex flex-col bg-white shadow-provider rounded-xl px-4 py-3 flex-wrap w-full",
    )
    return [card]
|
||||
819
dashboard/pages/overview.py
Normal file
@@ -0,0 +1,819 @@
|
||||
# Standard library imports
|
||||
import csv
|
||||
import glob
|
||||
import os
|
||||
import warnings
|
||||
from datetime import datetime, timedelta
|
||||
from itertools import product
|
||||
|
||||
# Third-party imports
|
||||
import dash
|
||||
import dash_bootstrap_components as dbc
|
||||
import pandas as pd
|
||||
import plotly.express as px
|
||||
import plotly.graph_objects as go
|
||||
from dash import callback, ctx, dcc, html
|
||||
from dash.dependencies import Input, Output
|
||||
|
||||
# Config import
|
||||
from dashboard.config import folder_path_overview
|
||||
from dashboard.lib.cards import create_provider_card
|
||||
from dashboard.lib.dropdowns import (
|
||||
create_account_dropdown,
|
||||
create_date_dropdown,
|
||||
create_region_dropdown,
|
||||
)
|
||||
from dashboard.lib.layouts import create_layout_overview
|
||||
|
||||
# Suppress warnings
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
# Global variables
|
||||
# TODO: Create a flag to let the user put a custom path
|
||||
# Collect only finding CSVs that contain at least one data row
# besides the header line.
csv_files = []

for csv_path in glob.glob(os.path.join(folder_path_overview, "*.csv")):
    with open(csv_path, "r", newline="") as handle:
        row_count = sum(1 for _ in csv.reader(handle))
    if row_count > 1:
        csv_files.append(csv_path)
|
||||
|
||||
|
||||
# Import logos providers
|
||||
# Provider logos shown on the per-provider overview cards.
aws_provider_logo = html.Img(
    alt="aws provider", src="assets/images/providers/aws_provider.png"
)
azure_provider_logo = html.Img(
    alt="azure provider", src="assets/images/providers/azure_provider.png"
)
gcp_provider_logo = html.Img(
    alt="gcp provider", src="assets/images/providers/gcp_provider.png"
)
ks8_provider_logo = html.Img(
    alt="k8s provider", src="assets/images/providers/k8s_provider.png"
)
|
||||
|
||||
|
||||
def load_csv_files(csv_files):
    """Load Prowler CSV outputs into a single pandas DataFrame.

    Args:
        csv_files: Iterable of paths to semicolon-separated CSV files.

    Returns:
        A DataFrame with every valid file concatenated (all columns cast to
        ``str``), or ``None`` when no file contains a ``CHECK_ID`` column.
    """
    dfs = []
    for file in csv_files:
        df = pd.read_csv(file, sep=";", on_bad_lines="skip")
        # Only Prowler findings files have a CHECK_ID column; skip others.
        if "CHECK_ID" in df.columns:
            dfs.append(df.astype(str))
    # Handle the case where there are no valid files: an explicit emptiness
    # check is clearer than catching the ValueError pd.concat raises on an
    # empty list, and cannot mask unrelated ValueErrors from concat itself.
    if not dfs:
        return None
    return pd.concat(dfs, ignore_index=True)
|
||||
|
||||
|
||||
data = load_csv_files(csv_files)

# When no valid CSV could be loaded, register an error page instead of the
# dashboard so the user gets actionable feedback.
if data is None:
    # Initializing the Dash App
    dash.register_page(__name__, path="/")

    layout = html.Div(
        [
            html.H1(
                "No data available",
                className="text-prowler-stone-900 text-2xxl font-bold",
            ),
            html.Div(className="flex justify-between border-b border-prowler-500 pb-3"),
            html.Div(
                [
                    html.Div(
                        "Check the data folder to see if the files are in the correct format",
                        className="text-prowler-stone-900 text-lg font-bold",
                    )
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
            ),
        ]
    )
|
||||
else:

    # Normalize Prowler v3 output columns to the v4 names used below.
    # v3 files carry ASSESSMENT_START_TIME / ACCOUNT_ID / RESOURCE_ID /
    # SUBSCRIPTION instead of TIMESTAMP / ACCOUNT_UID / RESOURCE_UID.
    if "ASSESSMENT_START_TIME" in data.columns:
        # ISO "T" separator -> space so the value parses as "Y-m-d H:M:S".
        data["ASSESSMENT_START_TIME"] = data["ASSESSMENT_START_TIME"].str.replace(
            "T", " "
        )
        # For each row, fall back to ASSESSMENT_START_TIME when TIMESTAMP is null.
        data["TIMESTAMP"] = data.apply(
            lambda x: (
                x["ASSESSMENT_START_TIME"]
                if pd.isnull(x["TIMESTAMP"])
                else x["TIMESTAMP"]
            ),
            axis=1,
        )
    # v3 AWS outputs: ACCOUNT_ID fills a missing ACCOUNT_UID.
    if "ACCOUNT_ID" in data.columns:
        data["ACCOUNT_UID"] = data.apply(
            lambda x: (
                x["ACCOUNT_ID"] if pd.isnull(x["ACCOUNT_UID"]) else x["ACCOUNT_UID"]
            ),
            axis=1,
        )
    # v3 outputs: RESOURCE_ID fills a missing RESOURCE_UID.
    if "RESOURCE_ID" in data.columns:
        data["RESOURCE_UID"] = data.apply(
            lambda x: (
                x["RESOURCE_ID"] if pd.isnull(x["RESOURCE_UID"]) else x["RESOURCE_UID"]
            ),
            axis=1,
        )
    # v3 Azure outputs: SUBSCRIPTION fills a missing ACCOUNT_UID.
    if "SUBSCRIPTION" in data.columns:
        data["ACCOUNT_UID"] = data.apply(
            lambda x: (
                x["SUBSCRIPTION"] if pd.isnull(x["ACCOUNT_UID"]) else x["ACCOUNT_UID"]
            ),
            axis=1,
        )
|
||||
|
||||
    # Keep only one scan per account per day: for each account, take the rows
    # from the LAST assessment time of each day.

    data["TIMESTAMP"] = pd.to_datetime(data["TIMESTAMP"])
    data["ASSESSMENT_TIME"] = data["TIMESTAMP"].dt.strftime("%Y-%m-%d %H:%M:%S")
    data_valid = pd.DataFrame()
    for account in data["ACCOUNT_UID"].unique():
        all_times = data[data["ACCOUNT_UID"] == account]["ASSESSMENT_TIME"].unique()
        # Sort descending so the first time seen for a day is the latest one.
        all_times.sort()
        all_times = all_times[::-1]
        times = []
        # Select the last ASSESSMENT_TIME in the day for each account.
        for time in all_times:
            if time.split(" ")[0] not in [
                times[i].split(" ")[0] for i in range(len(times))
            ]:
                times.append(time)
        # Keep only the rows belonging to those per-day latest scan times.
        data_valid = pd.concat(
            [
                data_valid,
                data[
                    (data["ACCOUNT_UID"] == account)
                    & (data["ASSESSMENT_TIME"].isin(times))
                ],
            ]
        )
    data = data_valid
    # Collapse ASSESSMENT_TIME to the day only.
    data["ASSESSMENT_TIME"] = data["ASSESSMENT_TIME"].apply(lambda x: x.split(" ")[0])

    # Truncate TIMESTAMP to day resolution as a datetime.
    data["TIMESTAMP"] = data["TIMESTAMP"].dt.strftime("%Y-%m-%d")
    data["TIMESTAMP"] = pd.to_datetime(data["TIMESTAMP"])

    # Assessment Date Dropdown (newest date first).
    assesment_times = list(data["ASSESSMENT_TIME"].unique())
    assesment_times.sort()
    assesment_times.reverse()
    date_dropdown = create_date_dropdown(assesment_times)
|
||||
|
||||
    # Cloud Account Dropdown.
    # Azure/GCP accounts are identified by ACCOUNT_NAME; AWS/Kubernetes by
    # ACCOUNT_UID. Each option is suffixed with its provider label.
    accounts = []
    if "ACCOUNT_NAME" in data.columns:
        for account in data["ACCOUNT_NAME"].unique():
            if "azure" in list(data[data["ACCOUNT_NAME"] == account]["PROVIDER"]):
                accounts.append(account + " - AZURE")
            if "gcp" in list(data[data["ACCOUNT_NAME"] == account]["PROVIDER"]):
                accounts.append(account + " - GCP")

    if "ACCOUNT_UID" in data.columns:
        for account in data["ACCOUNT_UID"].unique():
            if "aws" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
                accounts.append(account + " - AWS")
            if "kubernetes" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
                accounts.append(account + " - K8S")

    account_dropdown = create_account_dropdown(accounts)
|
||||
|
||||
# Region Dropdown
|
||||
# Handle the case where there is location column
|
||||
if "LOCATION" in data.columns:
|
||||
data["REGION"] = data["LOCATION"]
|
||||
# Hande the case where there is no region column
|
||||
if "REGION" not in data.columns:
|
||||
data["REGION"] = "-"
|
||||
# Handle the case where the region is null
|
||||
data["REGION"].fillna("-")
|
||||
regions = ["All"] + list(data["REGION"].unique())
|
||||
regions = [x for x in regions if str(x) != "nan" and x.__class__.__name__ == "str"]
|
||||
# Correct the values
|
||||
options = []
|
||||
for value in regions:
|
||||
if " " in value:
|
||||
options.append(value.split(" ")[1])
|
||||
else:
|
||||
options.append(value)
|
||||
regions = options
|
||||
region_dropdown = create_region_dropdown(regions)
|
||||
|
||||
    # Button that triggers the CSV export of the findings table (handled in
    # the filter_data callback via ctx.triggered_id == "download_link").
    download_button = html.Button(
        "Download this table as CSV",
        id="download_link",
        n_clicks=0,
        className="border-solid border-2 border-prowler-stone-900/10 hover:border-solid hover:border-2 hover:border-prowler-stone-900/10 text-prowler-stone-900 inline-block px-4 py-2 text-xs font-bold uppercase transition-all rounded-lg text-gray-900 hover:bg-prowler-stone-900/10 flex justify-end w-fit",
    )

    # Register this module as the dashboard's landing page.
    dash.register_page(__name__, path="/")

    # Create the page layout with all three filter dropdowns.
    layout = create_layout_overview(
        account_dropdown, date_dropdown, region_dropdown, download_button
    )
|
||||
|
||||
|
||||
    # Callback wiring: the three filter dropdowns drive every component on
    # the overview page (charts, table, provider cards, download payload and
    # the dropdowns' own value/options, which are normalized server-side).
    @callback(
        [
            Output("status_graph", "children"),
            Output("two_pie_chart", "children"),
            Output("line_plot", "children"),
            Output("table", "children"),
            Output("download-data", "data"),
            Output("cloud-account-filter", "value"),
            Output("cloud-account-filter", "options"),
            Output("region-filter", "value"),
            Output("region-filter", "options"),
            Output("report-date-filter", "value"),
            Output("aws_card", "children"),
            Output("azure_card", "children"),
            Output("gcp_card", "children"),
            Output("k8s_card", "children"),
            Output("subscribe_card", "children"),
            Output("info-file-over", "title"),
        ],
        Input("cloud-account-filter", "value"),
        Input("region-filter", "value"),
        Input("report-date-filter", "value"),
    )
|
||||
    def filter_data(cloud_account_values, region_account_values, assessment_value):
        """Recompute every overview component for the selected filters.

        Args:
            cloud_account_values: Selected entries of the account dropdown
                (values like "<account> - AWS"), or ["All"].
            region_account_values: Selected regions, or ["All"].
            assessment_value: Selected assessment date ("%Y-%m-%d").

        Returns:
            A 16-tuple matching the Output list of the @callback decorator.
        """
        filtered_data = data.copy()
        # Fold the MUTED flag into STATUS so muted findings get their own
        # categories ('MUTED (FAIL)' / 'MUTED (PASS)' / 'MUTED (MANUAL)').
        if "MUTED" in filtered_data.columns:
            filtered_data["STATUS"] = filtered_data.apply(
                lambda x: (
                    "MUTED (FAIL)"
                    if x["STATUS"] == "FAIL" and x["MUTED"] == "True"
                    else x["STATUS"]
                ),
                axis=1,
            )
            filtered_data["STATUS"] = filtered_data.apply(
                lambda x: (
                    "MUTED (PASS)"
                    if x["STATUS"] == "PASS" and x["MUTED"] == "True"
                    else x["STATUS"]
                ),
                axis=1,
            )
            filtered_data["STATUS"] = filtered_data.apply(
                lambda x: (
                    "MUTED (MANUAL)"
                    if x["STATUS"] == "MANUAL" and x["MUTED"] == "True"
                    else x["STATUS"]
                ),
                axis=1,
            )

        # Take the latest date of the data (descending sort, newest first).
        account_date = filtered_data["ASSESSMENT_TIME"].unique()
        account_date.sort()
        account_date = account_date[::-1]

        # 7-day window ending at the latest scan, used by the evolution plot.
        start_date = datetime.strptime(account_date[0], "%Y-%m-%d") - timedelta(days=7)
        end_date = datetime.strptime(account_date[0], "%Y-%m-%d")

        filtered_data_sp = filtered_data[
            (filtered_data["TIMESTAMP"] >= start_date)
            & (filtered_data["TIMESTAMP"] <= end_date)
        ]
        # Filter Assessment Time: fall back to the latest date when the
        # selected value is not available in the data.
        if assessment_value in account_date:
            updated_assessment_value = assessment_value
        else:
            updated_assessment_value = account_date[0]
            assessment_value = account_date[0]
        filtered_data = filtered_data[
            filtered_data["ASSESSMENT_TIME"] == updated_assessment_value
        ]
|
||||
|
||||
        # Build the "Files Scanned" tooltip: re-read each CSV and keep the
        # ones whose (normalized) scan date matches the selected date.
        list_files = []
        for file in csv_files:
            df = pd.read_csv(file, sep=";", on_bad_lines="skip")
            if "CHECK_ID" in df.columns:
                # v3 outputs: no TIMESTAMP column at all.
                if "TIMESTAMP" not in df.columns:
                    # Rename 'ASSESSMENT_START_TIME' to 'TIMESTAMP'.
                    df.rename(columns={"ASSESSMENT_START_TIME": "TIMESTAMP"}, inplace=True)
                    df["TIMESTAMP"] = df["TIMESTAMP"].str.replace("T", " ")
                # Mixed case: both columns present, use the start time as a
                # fallback for null timestamps.
                elif "ASSESSMENT_START_TIME" in df.columns:
                    df["ASSESSMENT_START_TIME"] = df["ASSESSMENT_START_TIME"].str.replace(
                        "T", " "
                    )
                    df["TIMESTAMP"] = df.apply(
                        lambda x: (
                            x["ASSESSMENT_START_TIME"]
                            if pd.isnull(x["TIMESTAMP"])
                            else x["TIMESTAMP"]
                        ),
                        axis=1,
                    )
                df["TIMESTAMP"] = pd.to_datetime(df["TIMESTAMP"])
                df["TIMESTAMP"] = df["TIMESTAMP"].dt.strftime("%Y-%m-%d")
                # Only the first row's date is inspected — assumes one scan
                # per file.
                if df["TIMESTAMP"][0] == updated_assessment_value:
                    list_files.append(file)
        # Keep only the base file names for display.
        # NOTE(review): splitting on "/" is not Windows-safe; os.path.basename
        # would be — confirm target platforms before changing.
        files_names = []
        for file in list_files:
            files_names.append(file.split("/")[-1])

        list_files = ",\n".join(files_names)
        list_files = "Files Scanned:\n" + list_files

        # Restrict the working set to the selected assessment date.
        filtered_data = filtered_data[
            filtered_data["ASSESSMENT_TIME"] == updated_assessment_value
        ]
|
||||
|
||||
        # Collect the account identifiers present after date filtering:
        # ACCOUNT_UID for aws/kubernetes, ACCOUNT_NAME for azure/gcp.
        all_account_ids = []
        if "ACCOUNT_UID" in filtered_data.columns:
            for account in filtered_data["ACCOUNT_UID"].unique():
                if "aws" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
                    all_account_ids.append(account)
                if "kubernetes" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
                    all_account_ids.append(account)

        all_account_names = []
        if "ACCOUNT_NAME" in filtered_data.columns:
            for account in filtered_data["ACCOUNT_NAME"].unique():
                if "azure" in list(data[data["ACCOUNT_NAME"] == account]["PROVIDER"]):
                    all_account_names.append(account)
                if "gcp" in list(data[data["ACCOUNT_NAME"] == account]["PROVIDER"]):
                    all_account_names.append(account)

        all_items = all_account_ids + all_account_names + ["All"]

        # Build the dropdown options, suffixing each account with its provider.
        cloud_accounts_options = ["All"]
        for item in all_items:
            if item not in cloud_accounts_options and item.__class__.__name__ == "str":
                if "ACCOUNT_UID" in filtered_data.columns:
                    if "aws" in list(data[data["ACCOUNT_UID"] == item]["PROVIDER"]):
                        cloud_accounts_options.append(item + " - AWS")
                    if "kubernetes" in list(data[data["ACCOUNT_UID"] == item]["PROVIDER"]):
                        cloud_accounts_options.append(item + " - K8S")
                if "ACCOUNT_NAME" in filtered_data.columns:
                    if "azure" in list(data[data["ACCOUNT_NAME"] == item]["PROVIDER"]):
                        cloud_accounts_options.append(item + " - AZURE")
                    if "gcp" in list(data[data["ACCOUNT_NAME"] == item]["PROVIDER"]):
                        cloud_accounts_options.append(item + " - GCP")

        # Normalize the account selection:
        #  - ["All"] (or empty) selects every account;
        #  - "All" mixed with concrete accounts drops the "All";
        #  - provider suffixes (" - AWS" etc.) are stripped before matching.
        if cloud_account_values == ["All"]:
            updated_cloud_account_values = all_items
        elif "All" in cloud_account_values and len(cloud_account_values) > 1:
            updated_cloud_account_values = []
            cloud_account_values.remove("All")
            for item in cloud_account_values:
                updated_cloud_account_values.append(item.split(" - ")[0])
        elif len(cloud_account_values) == 0:
            updated_cloud_account_values = all_items
            cloud_account_values = ["All"]
        else:
            updated_cloud_account_values = []
            for item in cloud_account_values:
                updated_cloud_account_values.append(item.split(" - ")[0])
        # De-duplicate while preserving order.
        values_choice = []
        for item in updated_cloud_account_values:
            if item not in values_choice:
                values_choice.append(item)

        # Apply the account filter against whichever identifier columns exist.
        if (
            "ACCOUNT_UID" in filtered_data.columns
            and "ACCOUNT_NAME" in filtered_data.columns
        ):
            filtered_data = filtered_data[
                filtered_data["ACCOUNT_UID"].isin(values_choice)
                | filtered_data["ACCOUNT_NAME"].isin(values_choice)
            ]
        elif "ACCOUNT_UID" in filtered_data.columns:
            filtered_data = filtered_data[filtered_data["ACCOUNT_UID"].isin(values_choice)]
        elif "ACCOUNT_NAME" in filtered_data.columns:
            filtered_data = filtered_data[filtered_data["ACCOUNT_NAME"].isin(values_choice)]
|
||||
|
||||
        # Snapshot before region filtering so the region dropdown options can
        # list every region available for the selected accounts.
        copy_data = filtered_data.copy()
        # Filter REGION

        # TODO - Handle azure locations
        if "LOCATION" in filtered_data.columns:
            filtered_data.rename(columns={"LOCATION": "REGION"}, inplace=True)
        if "REGION" not in filtered_data.columns:
            filtered_data["REGION"] = "-"
        # Same "All"/empty-selection normalization as the account filter.
        if region_account_values == ["All"]:
            updated_region_account_values = filtered_data["REGION"].unique()
        elif "All" in region_account_values and len(region_account_values) > 1:
            region_account_values.remove("All")
            updated_region_account_values = region_account_values

        elif len(region_account_values) == 0:
            updated_region_account_values = filtered_data["REGION"].unique()
            region_account_values = ["All"]
        else:
            updated_region_account_values = region_account_values

        filtered_data = filtered_data[
            filtered_data["REGION"].isin(updated_region_account_values)
        ]

        # NOTE(review): relies on REGION already existing in copy_data (it is
        # created at module load time) — confirm if module-level prep changes.
        region_filter_options = ["All"] + list(copy_data["REGION"].unique())
        # Drop null / non-string entries.
        region_filter_options = [
            x
            for x in region_filter_options
            if str(x) != "nan" and x.__class__.__name__ == "str"
        ]
        # Values of the form "<qualifier> <region>" keep only the region token.
        options = []
        for value in region_filter_options:
            if " " in value:
                options.append(value.split(" ")[1])
            else:
                options.append(value)

        region_filter_options = options

        # Failed findings (plain and muted) feed the table below.
        fails_findings_default = filtered_data[filtered_data["STATUS"] == "FAIL"]
        fails_findings_muted = filtered_data[filtered_data["STATUS"] == "MUTED (FAIL)"]
        fails_findings = pd.concat([fails_findings_default, fails_findings_muted])
|
||||
|
||||
        # Security-posture evolution plot over the 7-day window. When the
        # window has no rows (or plotting fails), fall back to an empty chart.
        if len(filtered_data_sp) == 0:
            fig = px.pie()
            fig.update_layout(
                paper_bgcolor="#FFF",
            )
            line_chart = dcc.Graph(figure=fig, config={"displayModeBar": False})
        else:
            try:
                ########################################################
                """Line PLOT 1"""
                ########################################################
                # Normalize timestamps to day-resolution datetimes.
                filtered_data_sp["TIMESTAMP_formatted"] = pd.to_datetime(
                    filtered_data_sp["TIMESTAMP"]
                ).dt.strftime("%Y-%m-%d")
                filtered_data_sp["TIMESTAMP_formatted"] = pd.to_datetime(
                    filtered_data_sp["TIMESTAMP_formatted"]
                )
                # Generate a date range covering the window.
                date_range = pd.date_range(start=start_date, end=end_date)

                # NOTE(review): this formatted list is discarded — looks like
                # leftover code; confirm before removing.
                date_range.strftime("%Y-%m-%d").tolist()

                # Per-day counts of each STATUS value.
                satus_df = (
                    pd.DataFrame(
                        filtered_data_sp.groupby(["TIMESTAMP_formatted"])[
                            "STATUS"
                        ].value_counts()
                    )
                    .rename(columns={"STATUS": "Status_count"})
                    .reset_index()
                )
                satus_df = satus_df.rename(columns={"TIMESTAMP_formatted": "date"})

                # Cross every date with every status so days without findings
                # still appear (as zero) in the plot.
                statuses = list(filtered_data_sp["STATUS"].unique())
                combinations = list(product(date_range, statuses))

                all_date_combinations = pd.DataFrame(
                    combinations, columns=["date", "STATUS"]
                )

                result_df = all_date_combinations.merge(
                    satus_df, on=["date", "STATUS"], how="left"
                )

                result_df.rename(columns={"count": "Status_count"}, inplace=True)

                result_df["Status_count"].fillna(0, inplace=True)

                # One fixed color per status category.
                color_mapping = {
                    "FAIL": "#e67272",
                    "PASS": "#54d283",
                    "INFO": "#2684FF",
                    "MANUAL": "#636c78",
                    "MUTED (FAIL)": "#fca903",
                    "MUTED (PASS)": "#03fccf",
                    "MUTED (MANUAL)": "#b33696",
                }

                # One line per status over the 7-day window.
                fig6 = px.line(
                    result_df,
                    x="date",
                    y="Status_count",
                    color="STATUS",
                    color_discrete_map=color_mapping,
                )
                fig6.update_traces(mode="markers+lines", marker=dict(size=8))
                fig6.update_layout(
                    margin=dict(l=0, r=0, t=0, b=0),
                    xaxis_title="",
                    yaxis_title="",
                    template="plotly",
                    legend=dict(x=0.02, y=0.98),
                    paper_bgcolor="#FFF",
                    font=dict(size=12, color="#292524"),
                )

                line_chart = dcc.Graph(
                    figure=fig6,
                    config={"displayModeBar": False, "scrollZoom": False},
                    style={"height": "300px", "overflow-y": "auto"},
                    className="max-h-[300px]",
                )
            except Exception:
                # Any plotting failure degrades to an empty chart.
                fig = px.pie()
                fig.update_layout(
                    paper_bgcolor="#FFF",
                )
                line_chart = dcc.Graph(figure=fig, config={"displayModeBar": False})
|
||||
|
||||
        # If the filters match no rows, render empty placeholders.
        # NOTE(review): this branch leaves pie_3 and table_data undefined,
        # which the layout/download code below references — confirm whether
        # that path is reachable in practice.
        if len(filtered_data) == 0:
            fig = px.pie()
            pie_2 = dcc.Graph(
                figure=fig,
                config={"displayModeBar": False},
            )
            table = dcc.Graph(figure=fig, config={"displayModeBar": False})

        else:
            # Status Pie Chart: failed rows feed the severity bar chart.
            df1 = filtered_data[filtered_data["STATUS"] == "FAIL"]

            # Status category -> color (same palette as the line plot).
            color_mapping_pass_fail = {
                "FAIL": "#e67272",
                "PASS": "#54d283",
                "INFO": "#2684FF",
                "MANUAL": "#636c78",
                "MUTED (FAIL)": "#fca903",
                "MUTED (PASS)": "#03fccf",
                "MUTED (MANUAL)": "#b33696",
            }
            # Severity -> color for the bar chart.
            color_mapping = {
                "critical": "#951649",
                "high": "#e11d48",
                "medium": "#ee6f15",
                "low": "#f9f5e6",
                "informational": "#3274d9",
            }

            # Donut chart of finding statuses.
            fig2 = px.pie(
                filtered_data,
                names="STATUS",
                hole=0.7,
                color="STATUS",
                color_discrete_map=color_mapping_pass_fail,
            )
            fig2.update_traces(
                hovertemplate=None,
                textposition="outside",
                textinfo="percent+label",
                rotation=50,
            )

            fig2.update_layout(
                margin=dict(l=0, r=0, t=50, b=0),
                autosize=True,
                showlegend=False,
                font=dict(size=14, color="#292524"),
                hoverlabel=dict(font_size=12),
                paper_bgcolor="#FFF",
            )

            pie_2 = dcc.Graph(
                figure=fig2,
                config={"displayModeBar": False},
                style={"height": "300px", "overflow-y": "auto"},
            )

            # Bar chart of failed findings per severity.
            color_bars = [
                color_mapping["critical"],
                color_mapping["high"],
                color_mapping["medium"],
                color_mapping["low"],
            ]

            figure_bars = go.Figure(
                data=[
                    go.Bar(
                        x=df1["SEVERITY"]
                        .value_counts()
                        .index,  # severity labels, most frequent first
                        y=df1["SEVERITY"].value_counts().values,
                        marker=dict(color=color_bars),
                        textposition="auto",
                    )
                ],
                layout=go.Layout(
                    paper_bgcolor="#FFF",
                    font=dict(size=12, color="#292524"),
                    margin=dict(l=20, r=20, t=0, b=150),
                ),
            )

            pie_3 = dcc.Graph(
                figure=figure_bars,
                config={"displayModeBar": False},
                style={"height": "400px", "overflow-y": "auto", "margin-top": "0px"},
            )

            # TABLE: failed findings sorted by severity (critical first). The
            # map/replace round-trip gives a sortable numeric key.
            severity_dict = {"critical": 3, "high": 2, "medium": 1, "low": 0}
            fails_findings["SEVERITY"] = fails_findings["SEVERITY"].map(severity_dict)
            fails_findings = fails_findings.sort_values(by=["SEVERITY"], ascending=False)
            fails_findings["SEVERITY"] = fails_findings["SEVERITY"].replace(
                {3: "critical", 2: "high", 1: "medium", 0: "low"}
            )
            table_data = fails_findings.copy()

            # Azure/GCP rows: show the account name in the Account ID column.
            if "ACCOUNT_NAME" in table_data.columns:
                for subscription in table_data["ACCOUNT_NAME"].unique():
                    if "nan" not in str(subscription):
                        table_data.loc[
                            table_data["ACCOUNT_NAME"] == subscription, "ACCOUNT_UID"
                        ] = subscription

            # Truncate long risk text and merge check id with resource id.
            table_data["RISK"] = table_data["RISK"].str.slice(0, 50)
            table_data["CHECK_ID"] = (
                table_data["CHECK_ID"] + " - " + table_data["RESOURCE_UID"]
            )
            # If the region is empty, fill it with '-'.
            table_data["REGION"] = table_data["REGION"].fillna("-")
            table_data = table_data[
                [
                    "CHECK_ID",
                    "SEVERITY",
                    "STATUS",
                    "REGION",
                    "SERVICE_NAME",
                    "PROVIDER",
                    "ACCOUNT_UID",
                ]
            ]
            table_data = table_data.rename(
                columns={
                    "CHECK_ID": "Check ID",
                    "SEVERITY": "Severity",
                    "STATUS": "Status",
                    "REGION": "Region",
                    "SERVICE_NAME": "Service",
                    "PROVIDER": "Provider",
                    "ACCOUNT_UID": "Account ID",
                }
            )

            # Render at most 25 rows.
            if len(table_data) > 25:
                table = dbc.Table.from_dataframe(
                    table_data[:25],
                    striped=True,
                    bordered=False,
                    hover=True,
                    className="table-overview",
                )
            else:
                table = dbc.Table.from_dataframe(
                    table_data,
                    striped=True,
                    bordered=False,
                    hover=True,
                    className="table-overview",
                )
|
||||
|
||||
        # Status Graphic (donut chart card).
        status_graph = [
            html.Span(
                "Status",
                className="text-center text-prowler-stone-900 uppercase text-xs font-bold",
            ),
            html.Div(
                [
                    pie_2,
                ],
                className="w-full",
            ),
        ]

        # Severity bar chart card.
        two_pie_chart = [
            html.Span(
                "Severity",
                className="text-center text-prowler-stone-900 uppercase text-xs font-bold",
            ),
            html.Div(
                [
                    pie_3,
                ],
                className="",
            ),
        ]

        # 7-day evolution line plot card.
        line_plot = [
            html.Span(
                "Security Posture Evolution (last 7 days)",
                className="text-center text-prowler-stone-900 uppercase text-xs font-bold",
            ),
            html.Div([line_chart], className=""),
        ]

        # Findings table card.
        table_card = [
            html.Div([table], className="grid grid-cols-auto"),
        ]

        # Per-provider summary cards built from the filtered data.
        aws_card = create_provider_card("aws", aws_provider_logo, "Accounts", filtered_data)
        azure_card = create_provider_card(
            "azure", azure_provider_logo, "Subscriptions", filtered_data
        )
        gcp_card = create_provider_card("gcp", gcp_provider_logo, "Projects", filtered_data)
        k8s_card = create_provider_card(
            "kubernetes", ks8_provider_logo, "Clusters", filtered_data
        )

        # Subscribe to prowler SaaS card (external link).
        subscribe_card = [
            html.Div(
                html.A(
                    [
                        html.Img(src="assets/favicon.ico", className="w-5 mr-3"),
                        html.Span("Subscribe to prowler SaaS"),
                    ],
                    href="https://prowler.pro/",
                    target="_blank",
                    className="text-prowler-stone-900 inline-flex px-4 py-2 text-xs font-bold uppercase transition-all rounded-lg text-gray-900 hover:bg-prowler-stone-900/10 border-solid border-1 hover:border-prowler-stone-900/10 hover:border-solid hover:border-1 border-prowler-stone-900/10",
                ),
            )
        ]
        # Only populate the download payload when the download button was the
        # component that triggered this callback; otherwise return None so no
        # file download fires.
        if ctx.triggered_id == "download_link":
            csv_data = dcc.send_data_frame(table_data[:25].to_csv, "mydf.csv")
            return (
                status_graph,
                two_pie_chart,
                line_plot,
                table_card,
                csv_data,
                cloud_account_values,
                cloud_accounts_options,
                region_account_values,
                region_filter_options,
                assessment_value,
                aws_card,
                azure_card,
                gcp_card,
                k8s_card,
                subscribe_card,
                list_files,
            )
        else:
            return (
                status_graph,
                two_pie_chart,
                line_plot,
                table_card,
                None,
                cloud_account_values,
                cloud_accounts_options,
                region_account_values,
                region_filter_options,
                assessment_value,
                aws_card,
                azure_card,
                gcp_card,
                k8s_card,
                subscribe_card,
                list_files,
            )
|
||||
179
dashboard/src/input.css
Normal file
@@ -0,0 +1,179 @@
|
||||
/* Use this file to add custom styles using Tailwind's utility classes. */
|
||||
|
||||
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Page background for the whole Dash app content area. */
#_dash-app-content {
  @apply bg-prowler-stone-500;
}

@layer components {
  /* Dashboard grid templates: first column wider than the other eleven. */
  .custom-grid {
    grid-template-columns: minmax(0, 16fr) repeat(11, minmax(0, 11fr));
  }

  .custom-grid-large {
    grid-template-columns: minmax(0, 10fr) repeat(11, minmax(0, 11fr));
  }

  /* Styles for the table in the overview page. display: table on thead and
     each tbody row keeps column widths aligned while tbody scrolls. */
  .table-overview thead {
    display: table;
    width: 100%;
    table-layout: fixed;
  }

  .table-overview tbody {
    -ms-overflow-style: none; /* IE and Edge */
    scrollbar-width: none; /* Firefox */
  }

  .table-overview tbody tr {
    display: table;
    width: 100%;
    table-layout: fixed;
  }
  /* Styles for thead */
  .table-overview th {
    @apply bg-prowler-stone-900 text-sm py-3 font-bold;
  }

  .table-overview td {
    @apply text-prowler-stone-900 bg-prowler-white text-sm py-2 font-bold;
  }

  /* Per-column widths (must match the column order rendered in overview.py). */
  /* Check ID */
  .table-overview td:nth-child(1),
  .table-overview th:nth-child(1) {
    @apply w-[52%];
  }
  /* Severity */
  .table-overview td:nth-child(2),
  .table-overview th:nth-child(2) {
    @apply w-[8%] capitalize;
  }
  /* Status */
  .table-overview td:nth-child(3),
  .table-overview th:nth-child(3) {
    @apply w-[7%];
  }
  .table-overview td:nth-child(3) {
    @apply font-bold text-prowler-error;
  }
  /* Region */
  .table-overview td:nth-child(4),
  .table-overview th:nth-child(4) {
    @apply w-[9%];
  }
  /* Service */
  .table-overview td:nth-child(5),
  .table-overview th:nth-child(5) {
    @apply w-[6%];
  }
  /* Provider */
  .table-overview td:nth-child(6),
  .table-overview th:nth-child(6) {
    @apply w-[7%];
  }
  /* Account ID */
  .table-overview td:nth-child(7),
  .table-overview th:nth-child(7) {
    @apply w-[11%];
  }
}

/* Styles for the accordion in the compliance page. */
#_dash-app-content .accordion .accordion-header .accordion-button {
  @apply text-prowler-stone-900 inline-block px-4 text-xs font-bold uppercase transition-all rounded-lg bg-prowler-stone-300 hover:bg-prowler-stone-900/10;
}

#_dash-app-content .accordion .accordion-item {
  @apply text-prowler-stone-900 bg-prowler-white rounded-lg;
}

#_dash-app-content .accordion .accordion-button:not(.collapsed) {
  @apply text-prowler-stone-900 bg-prowler-stone-500;
}

#_dash-app-content .accordion .dash-table-container {
  @apply grid;
}

#_dash-app-content .accordion table {
  @apply rounded-lg;
}
/* Styles for thead */
#_dash-app-content .accordion th {
  @apply text-prowler-white text-left bg-prowler-stone-900 text-xs py-1 font-bold;
}

/* Styles for td */
#_dash-app-content .accordion td {
  @apply text-prowler-stone-900 text-left bg-prowler-white text-xs py-1 font-light;
}

/* Styles for table cells */
#_dash-app-content .accordion table tbody thead,
#_dash-app-content .accordion table tbody tr {
  @apply w-full;
}

/* Per-column widths for the compliance accordion tables. */
/* Check ID */
#_dash-app-content .accordion table th:nth-child(1) {
  @apply w-[60%];
}
/* Status */
#_dash-app-content .accordion table th:nth-child(2) {
  @apply w-[10%] text-center;
}
#_dash-app-content .accordion table td:nth-child(2) {
  @apply text-center;
}
/* Region */
#_dash-app-content .accordion table th:nth-child(3) {
  @apply w-[10%];
}
/* Account ID */
#_dash-app-content .accordion table th:nth-child(4) {
  @apply w-[10%];
}
/* Resource ID */
#_dash-app-content .accordion table th:nth-child(5) {
  @apply w-[10%];
}

#_dash-app-content .compliance-data-layout,
#_dash-app-content .accordion-body,
#_dash-app-content .compliance-data-layout .accordion.accordion-flush {
  @apply grid gap-y-4;
}

#_dash-app-content .accordion-inner--child,
#_dash-app-content .accordion-inner {
  @apply relative;
}

#_dash-app-content .info-bar {
  @apply absolute left-1/2 transform -translate-x-1/2 top-2 h-8 z-50;
}

#_dash-app-content .info-bar-child {
  @apply absolute right-6 top-2 w-auto h-8 z-50;
}

@layer utilities {
  /* Hide scrollbar for Chrome, Safari and Opera */
  .no-scrollbar::-webkit-scrollbar {
    display: none;
  }
  /* Hide scrollbar for IE, Edge and Firefox */
  .no-scrollbar {
    -ms-overflow-style: none; /* IE and Edge */
    scrollbar-width: none; /* Firefox */
  }
}
|
||||
90
dashboard/tailwind.config.js
Normal file
@@ -0,0 +1,90 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
module.exports = {
|
||||
content: [
|
||||
"./assets/**/*.{py,html,js}",
|
||||
"./components/**/*.{py,html,js}",
|
||||
"./pages/**/*.{py,html,js}",
|
||||
"./utils/**/*.{py,html,js}",
|
||||
"./app.py",
|
||||
],
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
prowler: {
|
||||
stone: {
|
||||
950: "#1C1917",
|
||||
900: "#292524",
|
||||
500: "#E7E5E4",
|
||||
300: "#F5F5F4",
|
||||
},
|
||||
gray: {
|
||||
900: "#9bAACF",
|
||||
700: "#BEC8E4",
|
||||
500: "#C8D0E7",
|
||||
300: "#E4EBF5",
|
||||
},
|
||||
status: {
|
||||
passed: "#1FB53F",
|
||||
failed: "#A3231F",
|
||||
},
|
||||
lime: "#84CC16",
|
||||
white: "#FFFFFF",
|
||||
error: "#B91C1C",
|
||||
},
|
||||
},
|
||||
fontSize: {
|
||||
'3xs': '0.625rem', // 10px
|
||||
'2xs': '0.6875rem', // 11px
|
||||
xs: '0.75rem', // 12px
|
||||
sm: '0.875rem', // 14px
|
||||
base: '1rem', // 16px
|
||||
lg: '1.125rem', // 18px
|
||||
xl: '1.25rem', // 20px
|
||||
'2xl': '1.375rem', // 22px
|
||||
'2xxl': '1.5rem', // 24px
|
||||
'3xl': '1.75rem', // 28px
|
||||
'4xl': '2rem', // 32px
|
||||
'5xl': '2.25rem', // 36px
|
||||
'6xl': '2.75rem', // 44px
|
||||
'7xl': '3.5rem' // 56px
|
||||
},
|
||||
fontWeight: {
|
||||
light: 300,
|
||||
regular: 400,
|
||||
medium: 500,
|
||||
bold: 700,
|
||||
heavy: 800
|
||||
},
|
||||
lineHeight: {
|
||||
14: "0.875rem", // 14px
|
||||
22: "1.375rem", // 22px
|
||||
26: "1.625rem", // 26px
|
||||
28: "1.75rem", // 28px
|
||||
30: "1.875rem", // 30px
|
||||
32: "2rem", // 32px
|
||||
34: "2.125rem", // 34px
|
||||
36: "2.25rem", // 36px
|
||||
40: "2.5rem", // 40px
|
||||
44: "2.75rem", // 44px
|
||||
48: "3rem", // 48px
|
||||
56: "3.5rem", // 56px
|
||||
68: "4.25rem", // 68px
|
||||
},
|
||||
boxShadow: {
|
||||
"provider":
|
||||
".3rem .3rem .6rem #c8d0e7, -.2rem -.2rem .5rem #FFF",
|
||||
"box-up":
|
||||
"0.3rem 0.3rem 0.6rem #c8d0e7, -0.2rem -0.2rem 0.5rem #FFF",
|
||||
"box-down":
|
||||
"inset .2rem .2rem .5rem #c8d0e7, inset -.2rem -.2rem .5rem #FFF",
|
||||
},
|
||||
backgroundImage: {
|
||||
"gradient-passed":
|
||||
"linear-gradient(127.43deg, #F1F5F8 -177.68%, #4ADE80 87.35%)",
|
||||
"gradient-failed":
|
||||
"linear-gradient(127.43deg, #F1F5F8 -177.68%, #EF4444 87.35%)",
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [],
|
||||
};
|
||||
302
poetry.lock
generated
@@ -701,6 +701,17 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
|
||||
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
|
||||
uvloop = ["uvloop (>=0.15.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "blinker"
|
||||
version = "1.7.0"
|
||||
description = "Fast, simple object-to-object and broadcast signaling"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"},
|
||||
{file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.26.165"
|
||||
@@ -1121,6 +1132,89 @@ ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
|
||||
[[package]]
|
||||
name = "dash"
|
||||
version = "2.16.1"
|
||||
description = "A Python framework for building reactive web-apps. Developed by Plotly."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "dash-2.16.1-py3-none-any.whl", hash = "sha256:8a9d2a618e415113c0b2a4d25d5dc4df5cb921f733b33dde75559db2316b1df1"},
|
||||
{file = "dash-2.16.1.tar.gz", hash = "sha256:b2871d6b8d4c9dfd0a64f89f22d001c93292910b41d92d9ff2bb424a28283976"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
dash-core-components = "2.0.0"
|
||||
dash-html-components = "2.0.0"
|
||||
dash-table = "5.0.0"
|
||||
Flask = ">=1.0.4,<3.1"
|
||||
importlib-metadata = "*"
|
||||
nest-asyncio = "*"
|
||||
plotly = ">=5.0.0"
|
||||
requests = "*"
|
||||
retrying = "*"
|
||||
setuptools = "*"
|
||||
typing-extensions = ">=4.1.1"
|
||||
Werkzeug = "<3.1"
|
||||
|
||||
[package.extras]
|
||||
celery = ["celery[redis] (>=5.1.2)", "redis (>=3.5.3)"]
|
||||
ci = ["black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==7.0.0)", "flaky (==3.7.0)", "flask-talisman (==1.0.0)", "jupyterlab (<4.0.0)", "mimesis (<=11.1.0)", "mock (==4.0.3)", "numpy (<=1.26.3)", "openpyxl", "orjson (==3.9.12)", "pandas (>=1.4.0)", "pyarrow", "pylint (==3.0.3)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "pyzmq (==25.1.2)", "xlrd (>=2.0.1)"]
|
||||
compress = ["flask-compress"]
|
||||
dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"]
|
||||
diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"]
|
||||
testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"]
|
||||
|
||||
[[package]]
|
||||
name = "dash-bootstrap-components"
|
||||
version = "1.5.0"
|
||||
description = "Bootstrap themed components for use in Plotly Dash"
|
||||
optional = false
|
||||
python-versions = ">=3.7, <4"
|
||||
files = [
|
||||
{file = "dash-bootstrap-components-1.5.0.tar.gz", hash = "sha256:083158c07434b9965e2d6c3e8ca72dbbe47dab23e676258cef9bf0ad47d2e250"},
|
||||
{file = "dash_bootstrap_components-1.5.0-py3-none-any.whl", hash = "sha256:b487fec1a85e3d6a8564fe04c0a9cd9e846f75ea9e563456ed3879592889c591"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
dash = ">=2.0.0"
|
||||
|
||||
[package.extras]
|
||||
pandas = ["numpy", "pandas"]
|
||||
|
||||
[[package]]
|
||||
name = "dash-core-components"
|
||||
version = "2.0.0"
|
||||
description = "Core component suite for Dash"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"},
|
||||
{file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dash-html-components"
|
||||
version = "2.0.0"
|
||||
description = "Vanilla HTML components for Dash"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"},
|
||||
{file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dash-table"
|
||||
version = "5.0.0"
|
||||
description = "Dash table"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"},
|
||||
{file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deprecated"
|
||||
version = "1.2.14"
|
||||
@@ -1309,6 +1403,29 @@ mccabe = ">=0.7.0,<0.8.0"
|
||||
pycodestyle = ">=2.11.0,<2.12.0"
|
||||
pyflakes = ">=3.2.0,<3.3.0"
|
||||
|
||||
[[package]]
|
||||
name = "flask"
|
||||
version = "3.0.2"
|
||||
description = "A simple framework for building complex web applications."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"},
|
||||
{file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
blinker = ">=1.6.2"
|
||||
click = ">=8.1.3"
|
||||
importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
|
||||
itsdangerous = ">=2.1.2"
|
||||
Jinja2 = ">=3.1.2"
|
||||
Werkzeug = ">=3.0.0"
|
||||
|
||||
[package.extras]
|
||||
async = ["asgiref (>=3.2)"]
|
||||
dotenv = ["python-dotenv"]
|
||||
|
||||
[[package]]
|
||||
name = "freezegun"
|
||||
version = "1.4.0"
|
||||
@@ -1753,6 +1870,17 @@ files = [
|
||||
[package.extras]
|
||||
colors = ["colorama (>=0.4.6)"]
|
||||
|
||||
[[package]]
|
||||
name = "itsdangerous"
|
||||
version = "2.1.2"
|
||||
description = "Safely pass data to untrusted environments and back."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
|
||||
{file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jinja2"
|
||||
version = "3.1.3"
|
||||
@@ -2662,6 +2790,17 @@ files = [
|
||||
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nest-asyncio"
|
||||
version = "1.6.0"
|
||||
description = "Patch asyncio to allow nested event loops"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
|
||||
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "networkx"
|
||||
version = "3.2.1"
|
||||
@@ -2680,6 +2819,51 @@ doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.
|
||||
extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"]
|
||||
test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "1.26.4"
|
||||
description = "Fundamental package for array computing in Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
|
||||
{file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
|
||||
{file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
|
||||
{file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"},
|
||||
{file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"},
|
||||
{file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"},
|
||||
{file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"},
|
||||
{file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"},
|
||||
{file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "oauthlib"
|
||||
version = "3.2.2"
|
||||
@@ -2792,6 +2976,79 @@ files = [
|
||||
{file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pandas"
|
||||
version = "2.2.1"
|
||||
description = "Powerful data structures for data analysis, time series, and statistics"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"},
|
||||
{file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"},
|
||||
{file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"},
|
||||
{file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"},
|
||||
{file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"},
|
||||
{file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"},
|
||||
{file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"},
|
||||
{file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"},
|
||||
{file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"},
|
||||
{file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"},
|
||||
{file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"},
|
||||
{file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"},
|
||||
{file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"},
|
||||
{file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"},
|
||||
{file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"},
|
||||
{file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"},
|
||||
{file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"},
|
||||
{file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"},
|
||||
{file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"},
|
||||
{file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"},
|
||||
{file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"},
|
||||
{file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"},
|
||||
{file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"},
|
||||
{file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"},
|
||||
{file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"},
|
||||
{file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"},
|
||||
{file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"},
|
||||
{file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"},
|
||||
{file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
numpy = [
|
||||
{version = ">=1.22.4,<2", markers = "python_version < \"3.11\""},
|
||||
{version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""},
|
||||
{version = ">=1.23.2,<2", markers = "python_version == \"3.11\""},
|
||||
]
|
||||
python-dateutil = ">=2.8.2"
|
||||
pytz = ">=2020.1"
|
||||
tzdata = ">=2022.7"
|
||||
|
||||
[package.extras]
|
||||
all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"]
|
||||
aws = ["s3fs (>=2022.11.0)"]
|
||||
clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"]
|
||||
compression = ["zstandard (>=0.19.0)"]
|
||||
computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"]
|
||||
consortium-standard = ["dataframe-api-compat (>=0.1.7)"]
|
||||
excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"]
|
||||
feather = ["pyarrow (>=10.0.1)"]
|
||||
fss = ["fsspec (>=2022.11.0)"]
|
||||
gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"]
|
||||
hdf5 = ["tables (>=3.8.0)"]
|
||||
html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"]
|
||||
mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"]
|
||||
output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"]
|
||||
parquet = ["pyarrow (>=10.0.1)"]
|
||||
performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
|
||||
plot = ["matplotlib (>=3.6.3)"]
|
||||
postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
|
||||
pyarrow = ["pyarrow (>=10.0.1)"]
|
||||
spss = ["pyreadstat (>=1.2.0)"]
|
||||
sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
|
||||
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
|
||||
xml = ["lxml (>=4.9.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "pathable"
|
||||
version = "0.4.3"
|
||||
@@ -2939,6 +3196,21 @@ files = [
|
||||
docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
|
||||
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
|
||||
|
||||
[[package]]
|
||||
name = "plotly"
|
||||
version = "5.20.0"
|
||||
description = "An open-source, interactive data visualization library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "plotly-5.20.0-py3-none-any.whl", hash = "sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"},
|
||||
{file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
packaging = "*"
|
||||
tenacity = ">=6.2.0"
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.4.0"
|
||||
@@ -3622,6 +3894,20 @@ urllib3 = ">=1.25.10,<3.0"
|
||||
[package.extras]
|
||||
tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"]
|
||||
|
||||
[[package]]
|
||||
name = "retrying"
|
||||
version = "1.3.4"
|
||||
description = "Retrying"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"},
|
||||
{file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "rfc3339-validator"
|
||||
version = "0.1.4"
|
||||
@@ -4084,6 +4370,20 @@ files = [
|
||||
[package.extras]
|
||||
widechars = ["wcwidth"]
|
||||
|
||||
[[package]]
|
||||
name = "tenacity"
|
||||
version = "8.2.3"
|
||||
description = "Retry code until it succeeds"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"},
|
||||
{file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
doc = ["reno", "sphinx", "tornado (>=4.5)"]
|
||||
|
||||
[[package]]
|
||||
name = "tldextract"
|
||||
version = "5.1.1"
|
||||
@@ -4506,4 +4806,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<3.13"
|
||||
content-hash = "9b65cb6d667de85fcc1797088aa53cd2bb35030ccbc1aed69a9390fc58736871"
|
||||
content-hash = "e414b353bd5fb303fc5b109fe908c3ac556e507c01dcc789b85aa7f8f9463fa0"
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
|
||||
from prowler.__main__ import prowler
|
||||
|
||||
@@ -53,6 +53,10 @@ from prowler.providers.common.quick_inventory import run_provider_quick_inventor
|
||||
|
||||
|
||||
def prowler():
|
||||
if sys.argv[1] == "dashboard":
|
||||
from dashboard.__main__ import dashboard
|
||||
|
||||
sys.exit(dashboard.run(debug=True, port=11666, use_reloader=False))
|
||||
# Parse Arguments
|
||||
parser = ProwlerArgumentParser()
|
||||
args = parser.parse()
|
||||
@@ -81,7 +85,7 @@ def prowler():
|
||||
)
|
||||
|
||||
if not args.no_banner:
|
||||
print_banner(args)
|
||||
print_banner(args.verbose, getattr(args, "fixer", None))
|
||||
|
||||
# We treat the compliance framework as another output format
|
||||
if compliance_framework:
|
||||
|
||||
@@ -138,7 +138,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your_-urroundings-1",
|
||||
"Id": "your-surroundings-1",
|
||||
"Name": "Your Surroundings-1",
|
||||
"Description": "Learn who is on your network. Maintain inventories of network connections (user accounts, vendors, business partners, etc.).",
|
||||
"Attributes": [
|
||||
|
||||
@@ -3,7 +3,7 @@ from colorama import Fore, Style
|
||||
from prowler.config.config import banner_color, orange_color, prowler_version, timestamp
|
||||
|
||||
|
||||
def print_banner(args):
|
||||
def print_banner(verbose: bool, fixer: bool = False):
|
||||
banner = rf"""{banner_color} _
|
||||
_ __ _ __ _____ _| | ___ _ __
|
||||
| '_ \| '__/ _ \ \ /\ / / |/ _ \ '__|
|
||||
@@ -15,7 +15,7 @@ def print_banner(args):
|
||||
"""
|
||||
print(banner)
|
||||
|
||||
if args.verbose or getattr(args, "fix", None):
|
||||
if verbose or fixer:
|
||||
print(
|
||||
f"""
|
||||
Color code for results:
|
||||
|
||||
@@ -36,7 +36,12 @@ def write_compliance_row_cis(
|
||||
elif compliance.Provider == "Kubernetes":
|
||||
(compliance_row, csv_header) = (
|
||||
generate_compliance_row_cis_kubernetes(
|
||||
finding, compliance, requirement, attribute, output_options
|
||||
finding,
|
||||
compliance,
|
||||
requirement,
|
||||
attribute,
|
||||
output_options,
|
||||
provider,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -5,12 +5,13 @@ from prowler.lib.utils.utils import outputs_unix_timestamp
|
||||
|
||||
|
||||
def generate_compliance_row_cis_kubernetes(
|
||||
finding, compliance, requirement, attribute, output_options
|
||||
finding, compliance, requirement, attribute, output_options, provider
|
||||
):
|
||||
compliance_row = Check_Output_CSV_KUBERNETES_CIS(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
Description=compliance.Description,
|
||||
Region=finding.namespace,
|
||||
Context=provider.identity.context,
|
||||
Namespace=finding.namespace,
|
||||
AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
|
||||
Requirements_Id=requirement.Id,
|
||||
Requirements_Description=requirement.Description,
|
||||
|
||||
@@ -120,7 +120,8 @@ class Check_Output_CSV_KUBERNETES_CIS(BaseModel):
|
||||
|
||||
Provider: str
|
||||
Description: str
|
||||
Region: str
|
||||
Context: str
|
||||
Namespace: str
|
||||
AssessmentDate: str
|
||||
Requirements_Id: str
|
||||
Requirements_Description: str
|
||||
|
||||
@@ -19,10 +19,11 @@ maintainers = [
|
||||
]
|
||||
name = "prowler"
|
||||
packages = [
|
||||
{include = "prowler"}
|
||||
{include = "prowler"},
|
||||
{include = "dashboard"}
|
||||
]
|
||||
readme = "README.md"
|
||||
version = "3.15.0"
|
||||
version = "4.0.0"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
alive-progress = "3.1.5"
|
||||
@@ -63,6 +64,15 @@ shodan = "1.31.0"
|
||||
slack-sdk = "3.27.1"
|
||||
tabulate = "0.9.0"
|
||||
|
||||
[tool.poetry.group.dashboards]
|
||||
optional = true
|
||||
|
||||
[tool.poetry.group.dashboards.dependencies]
|
||||
dash = "2.16.1"
|
||||
dash-bootstrap-components = "1.5.0"
|
||||
numpy = "1.26.4"
|
||||
pandas = "2.2.1"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
bandit = "1.7.8"
|
||||
black = "24.3.0"
|
||||
|
||||