Mirror of https://github.com/prowler-cloud/prowler.git (synced 2025-12-19 05:17:47 +00:00)

feat(groups): Launch specific checks from groups and services (#1204)
@@ -210,7 +210,7 @@ Prowler has been written in bash using AWS-CLI underneath and it works in Linux,
 or all checks but some of them:

 ```sh
-./prowler -E check42,check43
+./prowler -e check42,check43
 ```

 or for custom profile and region:
@@ -228,7 +228,7 @@ Prowler has been written in bash using AWS-CLI underneath and it works in Linux,
 or exclude some checks in the group:

 ```sh
-./prowler -g group4 -E check42,check43
+./prowler -g group4 -e check42,check43
 ```

 Valid check numbers are based on the AWS CIS Benchmark guide, so 1.1 is check11 and 3.10 is check310
config/__init__.py (new file, 0 lines)
@@ -2,3 +2,6 @@ from datetime import datetime

 timestamp = datetime.today().strftime("%Y-%m-%d %H:%M:%S")
 prowler_version = "3.0-alfa"
+
+# Groups
+groups_file = "groups.json"
groups.json (new file, 8 lines)
@@ -0,0 +1,8 @@
+{
+  "aws": {
+    "gdpr": [
+      "check11",
+      "check12"
+    ]
+  }
+}
@@ -1,6 +1,6 @@
 from colorama import Fore, Style

-from lib.config import prowler_version, timestamp
+from config.config import prowler_version, timestamp


 def print_version():
@@ -1,37 +1,60 @@
 import importlib
 import json
+import pkgutil
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
 from types import ModuleType

+from config.config import groups_file
 from lib.logger import logger
 from lib.outputs import report
+from lib.utils.utils import open_file, parse_json_file


 # Exclude checks to run
-def exclude_checks_to_run(checks_to_execute, excluded_checks):
+def exclude_checks_to_run(checks_to_execute: set, excluded_checks: list) -> set:
     for check in excluded_checks:
         checks_to_execute.discard(check)
     return checks_to_execute


-# Parse checks from file
-def parse_checks_from_file(checks_file):
+# Load checks from checklist.json
+def parse_checks_from_file(input_file: str, provider: str) -> set:
     checks_to_execute = set()
-    with open(checks_file) as f:
-        for line in f:
-            # Remove comments from file
-            line = line.partition("#")[0].strip()
-            # If file contains several checks comma-separated
-            if "," in line:
-                for check in line.split(","):
-                    checks_to_execute.add(check.strip())
-            # If line is not empty
-            elif len(line):
-                checks_to_execute.add(line)
+    f = open_file(input_file)
+    json_file = parse_json_file(f)
+
+    for check_name in json_file[provider]:
+        checks_to_execute.add(check_name)

     return checks_to_execute


-def load_checks_to_execute(checks_file, check_list, provider):
+# Load checks from groups.json
+def parse_groups_from_file(group_list: list, provider: str) -> set:
+    checks_to_execute = set()
+    f = open_file(groups_file)
+    available_groups = parse_json_file(f)
+
+    for group in group_list:
+        if group in available_groups[provider]:
+            for check_name in available_groups[provider][group]:
+                checks_to_execute.add(check_name)
+        else:
+            logger.error(
+                f"Group '{group}' was not found for the {provider.upper()} provider"
+            )
+    return checks_to_execute
+
+
+# Generate the list of checks to execute
+def load_checks_to_execute(
+    checks_file: str,
+    check_list: list,
+    service_list: list,
+    group_list: list,
+    provider: str,
+) -> set:

     checks_to_execute = set()

     # Handle if there are checks passed using -c/--checks
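A minimal usage sketch of the two helpers added above, assuming it is run from the repository root (so `lib` is importable and `groups.json` is in the working directory), with the group contents taken from the groups.json file added in this commit:

```python
from lib.check.check import exclude_checks_to_run, parse_groups_from_file

# Resolve the "gdpr" group for the aws provider; groups.json in this commit
# maps it to check11 and check12.
checks = parse_groups_from_file(["gdpr"], "aws")
print(checks)  # {"check11", "check12"}

# Drop excluded checks, as prowler.py does when -e/--excluded-checks is passed.
print(exclude_checks_to_run(checks, ["check11"]))  # {"check12"}
```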
@@ -42,28 +65,59 @@ def load_checks_to_execute(checks_file, check_list, provider):
     # Handle if there are checks passed using -C/--checks-file
     elif checks_file:
         try:
-            checks_to_execute = parse_checks_from_file(checks_file)
+            checks_to_execute = parse_checks_from_file(checks_file, provider)
         except Exception as e:
-            logger.error(f"{checks_file}: {e.__class__.__name__}")
+            logger.error(f"{e.__class__.__name__} -- {e}")

+    # Handle if there are services passed using -s/--services
+    elif service_list:
+        # Loaded dynamically from modules within provider/services
+        for service in service_list:
+            modules = recover_modules_from_provider(provider, service)
+            if not modules:
+                logger.error(f"Service '{service}' was not found for the AWS provider")
+            else:
+                for check_module in modules:
+                    # Recover check name and module name from import path
+                    # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
+                    check_name = check_module.split(".")[-1]
+                    # If the service is present in the group list passed as parameters
+                    # if service_name in group_list: checks_to_execute.add(check_name)
+                    checks_to_execute.add(check_name)
+
+    # Handle if there are groups passed using -g/--groups
+    elif group_list:
+        try:
+            checks_to_execute = parse_groups_from_file(group_list, provider)
+        except Exception as e:
+            logger.error(f"{e.__class__.__name__} -- {e}")

     # If there are no checks passed as argument
     else:
-        # Get all check modules to run with the specific provider
-        modules = recover_modules_from_provider(provider)
-        for check_module in modules:
-            # Recover check name from import path (last part)
-            # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
-            check_name = check_module.split(".")[-1]
-            checks_to_execute.add(check_name)
+        try:
+            # Get all check modules to run with the specific provider
+            modules = recover_modules_from_provider(provider)
+        except Exception as e:
+            logger.error(f"{e.__class__.__name__} -- {e}")
+        else:
+            for check_module in modules:
+                # Recover check name from import path (last part)
+                # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
+                check_name = check_module.split(".")[-1]
+                checks_to_execute.add(check_name)

     return checks_to_execute


-def recover_modules_from_provider(provider):
+def recover_modules_from_provider(provider: str, service: str = None) -> list:
     modules = []
+    module_path = f"providers.{provider}.services"
+    if service:
+        module_path += f".{service}"
+
     for module_name in pkgutil.walk_packages(
-        importlib.import_module(f"providers.{provider}.services").__path__,
-        importlib.import_module(f"providers.{provider}.services").__name__ + ".",
+        importlib.import_module(module_path).__path__,
+        importlib.import_module(module_path).__name__ + ".",
     ):
         # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
         if module_name.name.count(".") == 5:
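The selection order in load_checks_to_execute is -c, then -C, then -s, then -g, and finally every check discovered for the provider. Discovery relies on pkgutil.walk_packages over the providers.{provider}.services package tree; below is a standalone sketch of that pattern (discover_check_modules is an illustrative name, not part of this commit):

```python
import importlib
import pkgutil


def discover_check_modules(module_path: str) -> list:
    """Walk a package tree and keep modules at check depth, mirroring
    recover_modules_from_provider above, e.g. module_path="providers.aws.services"
    or "providers.aws.services.<service>"."""
    package = importlib.import_module(module_path)
    return [
        module.name
        for module in pkgutil.walk_packages(package.__path__, package.__name__ + ".")
        # providers.{provider}.services.{service}.{check_name}.{check_name} has 5 dots
        if module.name.count(".") == 5
    ]
```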
@@ -78,7 +132,7 @@ def run_check(check):
     report(findings)


-def import_check(check_path):
+def import_check(check_path: str) -> ModuleType:
     lib = importlib.import_module(f"{check_path}")
     return lib
@@ -199,8 +253,9 @@ class Check(ABC):

     def __parse_metadata__(self, metadata_file):
         # Opening JSON file
-        f = open(metadata_file)
-        check_metadata = json.load(f)
+        f = open_file(metadata_file)
+        # Parse JSON
+        check_metadata = parse_json_file(f)
         return check_metadata

     # Validate metadata
@@ -1,6 +1,10 @@
 import os

-from lib.check.check import exclude_checks_to_run, parse_checks_from_file
+from lib.check.check import (
+    exclude_checks_to_run,
+    parse_checks_from_file,
+    parse_groups_from_file,
+)


 class Test_Check:
@@ -18,23 +22,29 @@ class Test_Check:
     def test_parse_checks_from_file(self):
         test_cases = [
             {
-                "input": f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/checklistA.txt",
-                "expected": {"check12", "check11", "extra72", "check13"},
-            },
-            {
-                "input": f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/checklistB.txt",
-                "expected": {
-                    "extra72",
-                    "check13",
-                    "check11",
-                    "check12",
-                    "check56",
-                    "check2423",
+                "input": {
+                    "path": f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/checklistA.json",
+                    "provider": "aws",
                 },
+                "expected": {"check11", "check12", "check7777"},
             }
         ]
         for test in test_cases:
-            assert parse_checks_from_file(test["input"]) == test["expected"]
+            check_file = test["input"]["path"]
+            provider = test["input"]["provider"]
+            assert parse_checks_from_file(check_file, provider) == test["expected"]
+
+    def test_parse_groups_from_file(self):
+        test_cases = [
+            {
+                "input": {"groups": ["gdpr"], "provider": "aws"},
+                "expected": {"check11", "check12"},
+            }
+        ]
+        for test in test_cases:
+            provider = test["input"]["provider"]
+            groups = test["input"]["groups"]
+            assert parse_groups_from_file(groups, provider) == test["expected"]

     def test_exclude_checks_to_run(self):
         test_cases = [
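A natural follow-up case, not part of this commit, would cover the unknown-group path: parse_groups_from_file logs an error for a group missing from groups.json and adds no checks, so the result should be an empty set. A sketch, assuming the same imports and class as the test module above:

```python
    def test_parse_groups_from_file_unknown_group(self):
        # "not_a_group" is a made-up name that does not exist under "aws" in
        # groups.json, so the function logs an error and returns an empty set.
        assert parse_groups_from_file(["not_a_group"], "aws") == set()
```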
lib/check/fixtures/checklistA.json (new file, 7 lines)
@@ -0,0 +1,7 @@
+{
+  "aws": [
+    "check11",
+    "check12",
+    "check7777"
+  ]
+}
lib/check/fixtures/checklistA.txt (deleted)
@@ -1,6 +0,0 @@
-# You can add a comma seperated list of checks like this:
-check11,check12
-extra72 # You can also use newlines for each check
-check13 # This way allows you to add inline comments
-# Both of these can be combined if you have a standard list and want to add
-# inline comments for other checks.
lib/check/fixtures/checklistB.txt (deleted)
@@ -1,11 +0,0 @@
-# You can add a comma seperated list of checks like this:
-check11,check12
-extra72 # You can also use newlines for each check
-check13 # This way allows you to add inline comments
-# Both of these can be combined if you have a standard list and want to add
-# inline comments for other checks.
-#
-#
-#
-# check11,check12
-check2423,check56
lib/check/fixtures/groupsA.json (new file, 8 lines)
@@ -0,0 +1,8 @@
+{
+  "aws": {
+    "gdpr": [
+      "check11",
+      "check12"
+    ]
+  }
+}
lib/utils/__init__.py (new file, 0 lines)

lib/utils/utils.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+import json
+from io import TextIOWrapper
+from typing import Any
+
+from lib.logger import logger
+
+
+def open_file(input_file: str) -> TextIOWrapper:
+    try:
+        # Open the given file
+        f = open(input_file)
+    except Exception as e:
+        logger.critical(f"{input_file}: {e.__class__.__name__}")
+        quit()
+    else:
+        return f
+
+
+# Parse a JSON file
+def parse_json_file(input_file: TextIOWrapper) -> Any:
+    try:
+        json_file = json.load(input_file)
+    except Exception as e:
+        logger.critical(f"{input_file.name}: {e.__class__.__name__}")
+        quit()
+    else:
+        return json_file
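Both helpers wrap the bare open()/json.load() calls with logging and a hard exit via quit() on failure. A quick usage sketch, assuming it runs from the repository root where groups.json lives:

```python
from lib.utils.utils import open_file, parse_json_file

f = open_file("groups.json")   # logger.critical(...) and quit() if the file is missing
groups = parse_json_file(f)    # logger.critical(...) and quit() if the JSON is invalid
print(groups["aws"]["gdpr"])   # ['check11', 'check12'] per groups.json in this commit
```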
prowler.py (51 lines changed)
@@ -19,10 +19,12 @@ if __name__ == "__main__":
     parser.add_argument("provider", choices=["aws"], help="Specify Provider")

     # Arguments to set checks to run
-    # -c can't be used with -C
+    # The following arguments need to be set exclusively
     group = parser.add_mutually_exclusive_group()
     group.add_argument("-c", "--checks", nargs="+", help="List of checks")
     group.add_argument("-C", "--checks-file", nargs="?", help="List of checks")
+    group.add_argument("-s", "--services", nargs="+", help="List of services")
+    group.add_argument("-g", "--groups", nargs="+", help="List of groups")

     parser.add_argument("-e", "--excluded-checks", nargs="+", help="Checks to exclude")
     parser.add_argument(
@@ -79,6 +81,8 @@ if __name__ == "__main__":
     provider = args.provider
     checks = args.checks
     excluded_checks = args.excluded_checks
+    services = args.services
+    groups = args.groups
     checks_file = args.checks_file

     # Role assumption input options tests
@@ -124,31 +128,32 @@ if __name__ == "__main__":

     # Load checks to execute
     logger.debug("Loading checks")
-    checks_to_execute = load_checks_to_execute(checks_file, checks, provider)
-
+    checks_to_execute = load_checks_to_execute(
+        checks_file, checks, services, groups, provider
+    )
     # Exclude checks if -e
     if excluded_checks:
         checks_to_execute = exclude_checks_to_run(checks_to_execute, excluded_checks)


     # Execute checks
-    for check_name in checks_to_execute:
-        # Recover service from check name
-        service = check_name.split("_")[0]
-        try:
-            # Import check module
-            check_module_path = (
-                f"providers.{provider}.services.{service}.{check_name}.{check_name}"
-            )
-            lib = import_check(check_module_path)
-            # Recover functions from check
-            check_to_execute = getattr(lib, check_name)
-            c = check_to_execute()
-            # Run check
-            run_check(c)
+    if len(checks_to_execute):
+        for check_name in checks_to_execute:
+            # Recover service from check name
+            service = check_name.split("_")[0]
+            try:
+                # Import check module
+                check_module_path = (
+                    f"providers.{provider}.services.{service}.{check_name}.{check_name}"
+                )
+                lib = import_check(check_module_path)
+                # Recover functions from check
+                check_to_execute = getattr(lib, check_name)
+                c = check_to_execute()
+                # Run check
+                run_check(c)

-        # If check does not exists in the provider or is from another provider
-        except ModuleNotFoundError:
-            logger.error(
-                f"Check '{check_name}' was not found for the {provider.upper()} provider"
-            )
+            # If check does not exist in the provider or is from another provider
+            except ModuleNotFoundError:
+                logger.error(
+                    f"Check '{check_name}' was not found for the {provider.upper()} provider"
+                )
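The execution loop above derives the service from the first underscore-separated token of the check name, imports providers.{provider}.services.{service}.{check_name}.{check_name}, and instantiates the class of the same name. A sketch of that convention with a hypothetical check name (the checks shipped in this commit still use the checkNN naming):

```python
import importlib

provider = "aws"
check_name = "ec2_ebs_volume_encryption"  # hypothetical check name for illustration
service = check_name.split("_")[0]        # -> "ec2"

module_path = f"providers.{provider}.services.{service}.{check_name}.{check_name}"
# Equivalent to import_check(module_path) followed by getattr(lib, check_name):
check_class = getattr(importlib.import_module(module_path), check_name)
check_instance = check_class()            # handed to run_check() by prowler.py
```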
util/checklist_example.json (new file, 6 lines)
@@ -0,0 +1,6 @@
+{
+  "aws": [
+    "check11",
+    "check12"
+  ]
+}
util/checklist_example.txt (deleted)
@@ -1,6 +0,0 @@
-# You can add a comma seperated list of checks like this:
-check11,check12
-extra72 # You can also use newlines for each check
-check13 # This way allows you to add inline comments
-# Both of these can be combined if you have a standard list and want to add
-# inline comments for other checks.