Compare commits


1 Commit

Author: Pepe Fagoaga
SHA1: 4d337c462b
Message: Revert "fix(custom): execute custom checks (#4202)"
         This reverts commit 2a139e3dc7.
Date: 2024-06-17 13:17:38 +02:00
563 changed files with 9316 additions and 37299 deletions

.github/CODEOWNERS vendored (6 lines changed)

@@ -1,5 +1 @@
-* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
-# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
-# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
-/.github/ @prowler-cloud/sdk
+* @prowler-cloud/prowler-oss @prowler-cloud/prowler-dev


@@ -1,5 +1,6 @@
name: 🐞 Bug Report
description: Create a report to help us improve
title: "[Bug]: "
labels: ["bug", "status/needs-triage"]
body:


@@ -1,7 +1,8 @@
name: 💡 Feature Request
name: 💡 Feature Request
description: Suggest an idea for this project
labels: ["feature-request", "status/needs-triage"]
body:
- type: textarea
id: Problem


@@ -8,7 +8,7 @@ updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: master
labels:
@@ -17,14 +17,14 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: master
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v3
labels:
@@ -34,7 +34,7 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v3
labels:

.github/labeler.yml vendored (50 lines changed)

@@ -29,53 +29,3 @@ github_actions:
cli:
- changed-files:
- any-glob-to-any-file: "cli/**"
mutelist:
- changed-files:
- any-glob-to-any-file: "prowler/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "tests/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
integration/s3:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/s3/**"
- any-glob-to-any-file: "tests/providers/aws/lib/s3/**"
integration/slack:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/slack/**"
- any-glob-to-any-file: "tests/lib/outputs/slack/**"
integration/security-hub:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "tests/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
output/html:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/html/**"
- any-glob-to-any-file: "tests/lib/outputs/html/**"
output/asff:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
output/ocsf:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/ocsf/**"
- any-glob-to-any-file: "tests/lib/outputs/ocsf/**"
output/csv:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/csv/**"
- any-glob-to-any-file: "tests/lib/outputs/csv/**"


@@ -2,18 +2,11 @@
Please include relevant motivation and context for this PR.
If fixes an issue please add it with `Fix #XXXX`
### Description
Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.
### Checklist
- Are there new checks included in this PR? Yes / No
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
### License


@@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
-prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
+prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
env:
POETRY_VIRTUALENVS_CREATE: "false"
@@ -65,8 +65,6 @@ jobs:
id: get-prowler-version
run: |
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
# Store prowler version major just for the release
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -91,6 +89,15 @@ jobs:
;;
esac
- name: Update Prowler version (release)
id: update-prowler-version
if: github.event_name == 'release'
run: |
PROWLER_VERSION="${{ github.event.release.tag_name }}"
poetry version "${PROWLER_VERSION}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -111,7 +118,7 @@ jobs:
- name: Build and push container image (latest)
if: github.event_name == 'push'
-uses: docker/build-push-action@v6
+uses: docker/build-push-action@v5
with:
push: true
tags: |
@@ -123,7 +130,7 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
-uses: docker/build-push-action@v6
+uses: docker/build-push-action@v5
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context


@@ -13,10 +13,10 @@ name: "CodeQL"
on:
push:
branches: [ "master", "v3", "v4.*" ]
branches: [ "master", "v3" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master", "v3", "v4.*" ]
branches: [ "master", "v3" ]
schedule:
- cron: '00 12 * * *'


@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
-uses: trufflesecurity/trufflehog@3.80.4
+uses: trufflesecurity/trufflehog@v3.78.0
with:
path: ./
base: ${{ github.event.repository.default_branch }}


@@ -5,12 +5,10 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
build:
runs-on: ubuntu-latest
@@ -75,7 +73,7 @@ jobs:
- name: Safety
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
-poetry run safety check --ignore 70612
+poetry run safety check --ignore 67599 --ignore 70612
- name: Vulture
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |


@@ -40,6 +40,7 @@ jobs:
- name: Install dependencies
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Setup Python
uses: actions/setup-python@v5
@@ -47,6 +48,10 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}
cache: ${{ env.CACHE }}
- name: Update Poetry and config version
run: |
poetry version ${{ env.RELEASE_TAG }}
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
with:
@@ -55,6 +60,22 @@ jobs:
git_user_signingkey: true
git_commit_gpgsign: true
- name: Push updated version to the release tag
run: |
# Configure Git
git config user.name "github-actions"
git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
# Add the files with the version changed
git add prowler/config/config.py pyproject.toml
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
# Replace the tag with the version updated
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
# Push the tag
git push -f origin ${{ env.RELEASE_TAG }}
- name: Build Prowler package
run: |
poetry build


@@ -97,7 +97,7 @@ repos:
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
-entry: bash -c 'safety check --ignore 70612'
+entry: bash -c 'safety check --ignore 67599 --ignore 70612'
language: system
- id: vulture


@@ -37,9 +37,6 @@
<a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
</p>
<hr>
<p align="center">
<img align="center" src="/docs/img/prowler-cli-quick.gif" width="100%" height="100%">
</p>
# Description
@@ -63,9 +60,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
-| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
+| AWS | 360 | 66 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
-| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
+| Azure | 127 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
# 💻 Installation
@@ -94,7 +91,7 @@ The container images are available here:
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
-## From GitHub
+## From Github
Python >= 3.9, < 3.13 is required with pip and poetry:

cli/cli.md (new file, 6 lines)

@@ -0,0 +1,6 @@
# CLI
To show the banner, use:
`python cli/cli.py banner`
## Listing
List services by provider.
`python cli/cli.py <provider> list-services`

cli/cli.py (new file, 63 lines)

@@ -0,0 +1,63 @@
import typer
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
list_fixers,
list_services,
print_fixers,
print_services,
)
app = typer.Typer()
aws = typer.Typer(name="aws")
azure = typer.Typer(name="azure")
gcp = typer.Typer(name="gcp")
kubernetes = typer.Typer(name="kubernetes")
app.add_typer(aws, name="aws")
app.add_typer(azure, name="azure")
app.add_typer(gcp, name="gcp")
app.add_typer(kubernetes, name="kubernetes")
def list_resources(provider: str, resource_type: str):
if resource_type == "services":
print_services(list_services(provider))
elif resource_type == "fixers":
print_fixers(list_fixers(provider))
def create_list_commands(provider_typer: typer.Typer):
provider_name = provider_typer.info.name
@provider_typer.command(
"list-services",
help=f"List the {provider_name} services that are supported by Prowler.",
)
def list_services_command():
list_resources(provider_name, "services")
@provider_typer.command(
"list-fixers",
help=f"List the {provider_name} fixers that are supported by Prowler.",
)
def list_fixers_command():
list_resources(provider_name, "fixers")
create_list_commands(aws)
create_list_commands(azure)
create_list_commands(gcp)
create_list_commands(kubernetes)
@app.command("banner", help="Prints the banner of the tool.")
def banner(show: bool = True):
if show:
print_banner(show)
else:
print("Banner is not shown.")
if __name__ == "__main__":
app()
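An aside for reviewers: the new Typer app above can be exercised programmatically with Typer's test runner. A minimal sketch, assuming it is run from the repository root (so `cli.cli` is importable) with prowler's dependencies installed:

```python
# Minimal sketch: drive the new Typer CLI without a terminal.
# Assumes the repository root is on sys.path so `cli.cli` imports.
from typer.testing import CliRunner

from cli.cli import app

runner = CliRunner()

# Equivalent to: python cli/cli.py aws list-services
result = runner.invoke(app, ["aws", "list-services"])
print(result.exit_code)  # 0 on success
print(result.stdout)     # whatever print_services() writes

# Equivalent to: python cli/cli.py banner
result = runner.invoke(app, ["banner"])
print(result.stdout)
```

The same commands can of course be run directly from a shell, as cli/cli.md above shows.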


@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/


@@ -1,24 +0,0 @@
apiVersion: v2
name: prowler
description: Prowler Security Tool Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"


@@ -1,78 +0,0 @@
# prowler
![Version: 0.1.1](https://img.shields.io/badge/Version-0.1.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 1.16.0](https://img.shields.io/badge/AppVersion-1.16.0-informational?style=flat-square)
Prowler Security Tool Helm chart for Kubernetes
# Prowler Helm Chart Deployment
This guide provides step-by-step instructions for deploying the Prowler Helm chart.
## Prerequisites
Before you begin, ensure you have the following:
1. A running Kubernetes cluster.
2. Helm installed on your local machine. If you don't have Helm installed, you can follow the [Helm installation guide](https://helm.sh/docs/intro/install/).
3. Proper access to your Kubernetes cluster (e.g., `kubectl` is configured and working).
## Deployment Steps
### 1. Clone the Repository
Clone the repository containing the Helm chart to your local machine.
```sh
git clone git@github.com:prowler-cloud/prowler.git
cd prowler/contrib/k8s/helm
```
### 2. Deploy the helm chart
```
helm install prowler .
```
### 3. Verify the deployment
```
helm status prowler
kubectl get all -n prowler-ns
```
### 4. Clean Up
To uninstall the Helm release and clean up the resources, run:
```
helm uninstall prowler
kubectl delete namespace prowler-ns
```
## Values
| Key | Type | Default | Description |
|-----|------|---------|-------------|
| clusterRole.name | string | `"prowler-read-cluster"` | |
| clusterRoleBinding.name | string | `"prowler-read-cluster-binding"` | |
| configMap.name | string | `"prowler-hostpaths"` | |
| configMapData.etcCniNetd | string | `"/etc/cni/net.d"` | |
| configMapData.etcKubernetes | string | `"/etc/kubernetes"` | |
| configMapData.etcSystemd | string | `"/etc/systemd"` | |
| configMapData.libSystemd | string | `"/lib/systemd"` | |
| configMapData.optCniBin | string | `"/opt/cni/bin"` | |
| configMapData.usrBin | string | `"/usr/bin"` | |
| configMapData.varLibCni | string | `"/var/lib/cni"` | |
| configMapData.varLibEtcd | string | `"/var/lib/etcd"` | |
| configMapData.varLibKubeControllerManager | string | `"/var/lib/kube-controller-manager"` | |
| configMapData.varLibKubeScheduler | string | `"/var/lib/kube-scheduler"` | |
| configMapData.varLibKubelet | string | `"/var/lib/kubelet"` | |
| cronjob.hostPID | bool | `true` | |
| cronjob.name | string | `"prowler"` | |
| cronjob.schedule | string | `"0 0 * * *"` | |
| image.pullPolicy | string | `"Always"` | |
| image.repository | string | `"toniblyx/prowler"` | |
| image.tag | string | `"stable"` | |
| namespace.name | string | `"prowler"` | |
| serviceAccount.name | string | `"prowler"` | |
----------------------------------------------
Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)


@@ -1,11 +0,0 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: {{ .Values.clusterRole.name }}
rules:
- apiGroups: [""]
resources: ["pods", "configmaps", "nodes", "namespaces"]
verbs: ["get", "list", "watch"]
- apiGroups: ["rbac.authorization.k8s.io"]
resources: ["clusterrolebindings", "rolebindings", "clusterroles", "roles"]
verbs: ["get", "list", "watch"]


@@ -1,18 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ .Values.configMap.name }}
namespace: {{ .Values.namespace.name }}
data:
varLibCni: "{{ .Values.configMap.data.varLibCni }}"
varLibEtcd: "{{ .Values.configMap.data.varLibEtcd }}"
varLibKubelet: "{{ .Values.configMap.data.varLibKubelet }}"
varLibKubeScheduler: "{{ .Values.configMap.data.varLibKubeScheduler }}"
varLibKubeControllerManager: "{{ .Values.configMap.data.varLibKubeControllerManager }}"
etcSystemd: "{{ .Values.configMap.data.etcSystemd }}"
libSystemd: "{{ .Values.configMap.data.libSystemd }}"
etcKubernetes: "{{ .Values.configMap.data.etcKubernetes }}"
usrBin: "{{ .Values.configMap.data.usrBin }}"
etcCniNetd: "{{ .Values.configMap.data.etcCniNetd }}"
optCniBin: "{{ .Values.configMap.data.optCniBin }}"
srvKubernetes: "{{ .Values.configMap.data.srvKubernetes }}"


@@ -1,42 +0,0 @@
apiVersion: batch/v1
kind: CronJob
metadata:
name: {{ .Values.cronjob.name }}
namespace: {{ .Values.namespace.name }}
spec:
schedule: "{{ .Values.cronjob.schedule }}"
jobTemplate:
spec:
template:
metadata:
labels:
app: prowler
spec:
serviceAccountName: {{ .Values.serviceAccount.name }}
containers:
- name: prowler
image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
command: ["prowler"]
args: ["kubernetes", "-z", "-b"]
imagePullPolicy: {{ .Values.image.pullPolicy }}
volumeMounts:
{{- range $key, $value := .Values.configMap.data }}
{{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
{{- else }}
- name: {{ $key | lower }}
mountPath: {{ $value }}
readOnly: true
{{- end }}
{{- end }}
hostPID: {{ .Values.cronjob.hostPID }}
restartPolicy: Never
volumes:
{{- range $key, $value := .Values.configMap.data }}
{{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
{{- else }}
- name: {{ $key | lower }}
hostPath:
path: {{ $value }}
{{- end }}
{{- end }}
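The paired `range` loops in this (now removed) template mount every entry of `configMap.data` as a read-only volume, skipping `srvKubernetes` when `clusterType` is `gke`. A minimal Python sketch of that selection logic, with a hypothetical two-entry map:

```python
# Sketch of the template's volume selection, not the chart itself.
# host_paths mirrors .Values.configMap.data (two hypothetical entries).
host_paths = {
    "varLibKubelet": "/var/lib/kubelet",
    "srvKubernetes": "/srv/kubernetes",
}
cluster_type = "gke"  # mirrors .Values.clusterType

volume_mounts = [
    {"name": key.lower(), "mountPath": path, "readOnly": True}
    for key, path in host_paths.items()
    # the template skips the srvKubernetes entry on GKE clusters
    if not (cluster_type == "gke" and key == "srvKubernetes")
]
print(volume_mounts)  # srvKubernetes is excluded when cluster_type == "gke"
```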


@@ -1,4 +0,0 @@
apiVersion: v1
kind: Namespace
metadata:
name: {{ .Values.namespace.name }}


@@ -1,12 +0,0 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: {{ .Values.clusterRoleBinding.name }}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: {{ .Values.clusterRole.name }}
subjects:
- kind: ServiceAccount
name: {{ .Values.serviceAccount.name }}
namespace: {{ .Values.namespace.name }}


@@ -1,5 +0,0 @@
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ .Values.serviceAccount.name }}
namespace: {{ .Values.namespace.name }}


@@ -1,40 +0,0 @@
namespace:
name: prowler-ns
cronjob:
name: prowler
schedule: "0 0 * * *"
hostPID: true
serviceAccount:
name: prowler-sa
image:
repository: toniblyx/prowler
tag: stable
pullPolicy: Always
clusterType:
configMap:
name: prowler-config
data:
varLibCni: "/var/lib/cni"
varLibEtcd: "/var/lib/etcd"
varLibKubelet: "/var/lib/kubelet"
varLibKubeScheduler: "/var/lib/kube-scheduler"
varLibKubeControllerManager: "/var/lib/kube-controller-manager"
etcSystemd: "/etc/systemd"
libSystemd: "/lib/systemd"
etcKubernetes: "/etc/kubernetes"
usrBin: "/usr/bin"
etcCniNetd: "/etc/cni/net.d"
optCniBin: "/opt/cni/bin"
srvKubernetes: "/srv/kubernetes"
clusterRole:
name: prowler-read-cluster
clusterRoleBinding:
name: prowler-read-cluster-binding
roleName: prowler-read-cluster


@@ -21,7 +21,7 @@ print(
f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
)
cli.show_server_banner = lambda *x: click.echo(
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are using {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} with the S3 integration or that integration \nfrom {Fore.CYAN}{Style.BRIGHT}Prowler Open Source{Style.RESET_ALL} and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
)
# Initialize the app - incorporate css


@@ -319,7 +319,7 @@ Each Prowler check has metadata associated which is stored at the same level of
For the Remediation Code we use the following knowledge base to fill it:
- Official documentation for the provider
- https://docs.prowler.com/checks/checks-index
- https://docs.bridgecrew.io
- https://www.trendmicro.com/cloudoneconformity
- https://github.com/cloudmatos/matos/tree/master/remediations


@@ -1,11 +1,7 @@
# Debugging
Debugging in Prowler makes things easier!
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution.
## VSCode
In VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution. To do that, if you are using VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
The following file is an example of the [debugging configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) file that you can add to [Virtual Studio Code](https://code.visualstudio.com/).
This file should be inside the *.vscode* folder and its name has to be *launch.json*:
@@ -15,62 +11,31 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
"version": "0.2.0",
"configurations": [
{
"name": "Debug AWS Check",
"type": "debugpy",
"name": "Python: Current File",
"type": "python",
"request": "launch",
"program": "prowler.py",
"args": [
"aws",
"-f",
"eu-west-1",
"--service",
"cloudwatch",
"--log-level",
"ERROR",
"-c",
"<check_name>"
"-p",
"dev",
],
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Debug Azure Check",
"type": "debugpy",
"name": "Python: Debug Tests",
"type": "python",
"request": "launch",
"program": "prowler.py",
"args": [
"azure",
"--sp-env-auth",
"--log-level",
"ERROR",
"-c",
"<check_name>"
],
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Debug GCP Check",
"type": "debugpy",
"request": "launch",
"program": "prowler.py",
"args": [
"gcp",
"--log-level",
"ERROR",
"-c",
"<check_name>"
],
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Debug K8s Check",
"type": "debugpy",
"request": "launch",
"program": "prowler.py",
"args": [
"kubernetes",
"--log-level",
"ERROR",
"-c",
"<check_name>"
"program": "${file}",
"purpose": [
"debug-test"
],
"console": "integratedTerminal",
"justMyCode": false


@@ -48,10 +48,7 @@ Before we merge any of your pull requests we pass checks to the code, we use the
You can see all dependencies in file `pyproject.toml`.
-Moreover, you would need to install [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) on the latest version to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
???+ note
If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.
+Moreover, you would need to install [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
## Pull Request Checklist


@@ -23,8 +23,8 @@ The Prowler's service structure is the following and the way to initialise it is
All the Prowler provider's services inherits from a base class depending on the provider used.
- [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/service/service.py)
-- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
-- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
+- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
+- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
- [Kubernetes Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/lib/service/service.py)
Each class is used to initialize the credentials and the API's clients to be used in the service. If some threading is used it must be coded there.


@@ -40,10 +40,10 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to
Prowler for Azure supports the following authentication types:
-- [Service principal application](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser#service-principal-object) by environment variables (recommended)
+- Service principal authentication by environment variables (Enterprise Application)
- Current az cli credentials stored
- Interactive browser authentication
-- [Managed identity](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/overview) authentication
+- Managed identity authentication
### Service Principal authentication
@@ -56,8 +56,6 @@ export AZURE_CLIENT_SECRET="XXXXXXX"
```
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
Follow the instructions in the [Create Prowler Service Principal](../tutorials/azure/create-prowler-service-principal.md) section to create a service principal.
### AZ CLI / Browser / Managed Identity authentication
The other three cases does not need additional configuration, `--az-cli-auth` and `--managed-identity-auth` are automated options. To use `--browser-auth` the user needs to authenticate against Azure using the default browser to start the scan, also `tenant-id` is required.
@@ -66,22 +64,55 @@ The other three cases does not need additional configuration, `--az-cli-auth` an
To use each one you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
-- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler and specific Entra checks (not mandatory to have access to execute the tool). The permissions required by the tool are the following:
+- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler (not mandatory to have access to execute the tool).
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
#### Microsoft Entra ID scope
Microsoft Entra ID (AAD earlier) permissions required by the tool are the following:
- `Directory.Read.All`
- `Policy.Read.All`
- `UserAuthenticationMethod.Read.All`
The best way to assign it is through the Azure web console:
1. Access to Microsoft Entra ID
2. In the left menu bar, go to "App registrations"
3. Once there, in the menu bar click on "+ New registration" to register a new application
4. Fill the "Name, select the "Supported account types" and click on "Register. You will be redirected to the applications page.
![Register an Application page](../img/register-application.png)
5. Select the new application
6. In the left menu bar, select "API permissions"
7. Then click on "+ Add a permission" and select "Microsoft Graph"
8. Once in the "Microsoft Graph" view, select "Application permissions"
9. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" and select the following permissions:
- `Directory.Read.All`
- `Policy.Read.All`
- `UserAuthenticationMethod.Read.All`
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool. It is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
- `Reader`
- `ProwlerRole` (custom role defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json))
![EntraID Permissions](../img/AAD-permissions.png)
To assign the permissions, follow the instructions in the [Microsoft Entra ID permissions](../tutorials/azure/create-prowler-service-principal.md#assigning-the-proper-permissions) section and the [Azure subscriptions permissions](../tutorials/azure/subscriptions.md#assigning-proper-permissions) section, respectively.
#### Checks that require ProwlerRole
#### Subscriptions scope
The following checks require the `ProwlerRole` custom role to be executed, if you want to run them, make sure you have assigned the role to the identity that is going to be assumed by Prowler:
Regarding the subscription scope, Prowler by default scans all the subscriptions that is able to list, so it is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
- `app_function_access_keys_configured`
- `app_function_ftps_deployment_disabled`
- `Security Reader`
- `Reader`
To assign this roles, follow the instructions:
1. Access your subscription, then select your subscription.
2. Select "Access control (IAM)".
3. In the overview, select "Roles"
![IAM Page](../img/page-IAM.png)
4. Click on "+ Add" and select "Add role assignment"
5. In the search bar, type `Security Reader`, select it and click on "Next"
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
7. Click on "Review + assign" to apply the new role.
*Repeat these steps for `Reader` role*
## Google Cloud

Binary file not shown (before: 1.4 MiB)

Binary file not shown (before: 357 KiB)

Binary file not shown (before: 688 KiB)

Binary file not shown (before: 746 KiB, after: 214 KiB)

docs/img/page-IAM.png (new binary file, 348 KiB; not shown)

Binary file not shown (before: 552 KiB)


@@ -85,7 +85,7 @@ prowler --security-hub --region eu-west-1
```
???+ note
-It is recommended to send only fails to Security Hub and that is possible adding `--status FAIL` to the command. You can use, instead of the `--status FAIL` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just to send FAIL findings to AWS Security Hub.
+It is recommended to send only fails to Security Hub and that is possible adding `-q/--quiet` to the command. You can use, instead of the `-q/--quiet` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just to send FAIL findings to AWS Security Hub.
Since Prowler perform checks to all regions by default you may need to filter by region when running Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f/--region <region>` (if no region is used it will try to push findings in all regions hubs). Prowler will send findings to the Security Hub on the region where the scanned resource is located.
@@ -121,13 +121,13 @@ prowler --security-hub --role arn:aws:iam::123456789012:role/ProwlerExecutionRol
## Send only failed findings to Security Hub
-When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**. Therefore, the **AWS Security Hub** usage costs eventually would be lower. To follow that recommendation you could add the `--status FAIL` flag to the Prowler command:
+When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**. Therefore, the **AWS Security Hub** usage costs eventually would be lower. To follow that recommendation you could add the `-q/--quiet` flag to the Prowler command:
```sh
-prowler --security-hub --status FAIL
+prowler --security-hub --quiet
```
-You can use, instead of the `--status FAIL` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just to send FAIL findings to AWS Security Hub:
+You can use, instead of the `-q/--quiet` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just to send FAIL findings to AWS Security Hub:
```sh
prowler --security-hub --send-sh-only-fails


@@ -1,34 +0,0 @@
# How to create Prowler Service Principal
To allow Prowler to assume an identity and start the scan with the required privileges, it is necessary to create a Service Principal. To create one, follow these steps:
1. Access to Microsoft Entra ID
2. In the left menu bar, go to "App registrations"
3. Once there, in the menu bar click on "+ New registration" to register a new application
4. Fill the "Name, select the "Supported account types" and click on "Register. You will be redirected to the applications page.
5. Once in the application page, in the left menu bar, select "Certificates & secrets"
6. In the "Certificates & secrets" view, click on "+ New client secret"
7. Fill the "Description" and "Expires" fields and click on "Add"
8. Copy the value of the secret, it is going to be used as `AZURE_CLIENT_SECRET` environment variable.
![Register an Application page](../../img/create-sp.gif)
## Assigning the proper permissions
To allow Prowler to retrieve metadata from the identity assumed and specific Entra checks, it is needed to assign the following permissions:
1. Access to Microsoft Entra ID
2. In the left menu bar, go to "App registrations"
3. Once there, select the application that you have created
4. In the left menu bar, select "API permissions"
5. Then click on "+ Add a permission" and select "Microsoft Graph"
6. Once in the "Microsoft Graph" view, select "Application permissions"
7. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" select the following permissions:
- `Directory.Read.All`
- `Policy.Read.All`
- `UserAuthenticationMethod.Read.All`
8. Click on "Add permissions" to apply the new permissions.
9. Finally, click on "Grant admin consent for [your tenant]" to apply the permissions.
![EntraID Permissions](../../img/AAD-permissions.png)


@@ -1,6 +1,6 @@
# Azure subscriptions scope
-By default, Prowler is multisubscription, which means that is going to scan all the subscriptions is able to list. If you only assign permissions to one subscription, it is going to scan a single one.
+By default, Prowler is multisubscription, which means that is going to scan all the subscriptions is able to list. If you only assign permissions to one subscription, it is going to scan a single one.
Prowler also has the ability to limit the subscriptions to scan to a set passed as input argument, to do so:
```console
@@ -8,36 +8,3 @@ prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription
```
Where you can pass from 1 up to N subscriptions to be scanned.
## Assigning proper permissions
Regarding the subscription scope, Prowler by default scans all subscriptions that it is able to list, so it is necessary to add the `Reader` RBAC built-in roles per subscription or management group (recommended for multiple subscriptions, see it in the [next section](#recommendation-for-multiple-subscriptions)) to the entity that will be adopted by the tool:
To assign this roles, follow the instructions:
1. Access your subscription, then select your subscription.
2. Select "Access control (IAM)".
3. In the overview, select "Roles".
4. Click on "+ Add" and select "Add role assignment".
5. In the search bar, type `Reader`, select it and click on "Next".
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
7. Click on "Review + assign" to apply the new role.
![Add reader role to subscription](../../img/add-reader-role.gif)
Moreover, some checks need additional read-only permissions that are not covered by built-in roles, so a custom role is used. This role is defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json). Once the custom role is created, repeat the steps mentioned above to assign the new `ProwlerRole` to an identity.
## Recommendation for multiple subscriptions
When scanning multiple subscriptions, it can be tedious to create and assign roles for each one. For this reason, in Prowler we recommend the usage of *[management groups](https://learn.microsoft.com/en-us/azure/governance/management-groups/overview)* to group all subscriptions that are going to be audited by Prowler.
To do this in a proper way you have to [create a new management group](https://learn.microsoft.com/en-us/azure/governance/management-groups/create-management-group-portal) and add all roles in the same way that have been done for subscription scope.
![Create management group](../../img/create-management-group.gif)
Once the management group is properly set you can add all the subscription that you want to audit.
![Add subscription to management group](../../img/add-sub-to-management-group.gif)
???+ note
By default, `prowler` will scan all subscriptions in the Azure tenant, use the flag `--subscription-id` to specify the subscriptions to be scanned.


@@ -43,7 +43,6 @@ The following list includes all the AWS checks with configurable variables that
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |
## Azure
@@ -356,18 +355,6 @@ aws:
# aws.acm_certificates_expiration_check
days_to_expire_threshold: 7
# AWS EKS Configuration
# aws.eks_control_plane_logging_all_types_enabled
# EKS control plane logging types that must be enabled
eks_required_log_types:
[
"api",
"audit",
"authenticator",
"controllerManager",
"scheduler",
]
# Azure Configuration
azure:
# Azure Network Configuration


@@ -54,7 +54,7 @@ CustomChecksMetadata:
RelatedUrl: https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html
Remediation:
Code:
-CLI: aws s3api put-bucket-versioning --bucket <bucket-name> --versioning-configuration Status=Enabled,MFADelete=Enabled
+CLI: aws s3api put-bucket-versioning --bucket <bucket-name> --versioning-configuration Status=Enabled
NativeIaC: https://aws.amazon.com/es/s3/features/versioning/
Other: https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html
Terraform: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning


@@ -81,7 +81,7 @@ def get_table(data):
## S3 Integration
-If you are using Prowler SaaS with the S3 integration or that integration from Prowler Open Source and you want to use your data from your S3 bucket, you can run:
+If you are a Prowler Saas customer and you want to use your data from your S3 bucket, you can run:
```sh
aws s3 cp s3://<your-bucket>/output/csv ./output --recursive


@@ -25,17 +25,7 @@ Prowler will follow the same credentials search as [Google authentication librar
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
## Impersonate Service Account
If you want to impersonate a GCP service account, you can use the `--impersonate-service-account` argument:
```console
prowler gcp --impersonate-service-account <service-account-email>
```
This argument will use the default credentials to impersonate the service account provided.
-## Service APIs
+# GCP Service APIs
Prowler will use the Google Cloud APIs to get the information needed to perform the checks. Make sure that the following APIs are enabled in the project:

Binary file not shown (before: 4.5 MiB)

Binary file not shown (before: 26 KiB, after: 24 KiB)


@@ -1,20 +0,0 @@
# In-Cluster Execution
For in-cluster execution, you can use the supplied yaml files inside `/kubernetes`:
* [job.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/job.yaml)
* [prowler-role.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/prowler-role.yaml)
* [prowler-rolebinding.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/prowler-rolebinding.yaml)
They can be used to run Prowler as a job within a new Prowler namespace:
```console
kubectl apply -f kubernetes/job.yaml
kubectl apply -f kubernetes/prowler-role.yaml
kubectl apply -f kubernetes/prowler-rolebinding.yaml
kubectl get pods --namespace prowler-ns --> prowler-XXXXX
kubectl logs prowler-XXXXX --namespace prowler-ns
```
???+ note
By default, `prowler` will scan all namespaces in your active Kubernetes context. Use the [`--namespace`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/namespace/) flag to specify the namespace(s) to be scanned.


@@ -1,23 +0,0 @@
# Miscellaneous
## Context Filtering
Prowler will scan the active Kubernetes context by default.
To specify the Kubernetes context to be scanned, use the `--context` flag followed by the desired context name. For example:
```console
prowler --context my-context
```
This will ensure that Prowler scans the specified context/cluster for vulnerabilities and misconfigurations.
## Namespace Filtering
By default, `prowler` will scan all namespaces in the context you specify.
To specify the namespace(s) to be scanned, use the `--namespace` flag followed by the desired namespace(s) separated by spaces. For example:
```console
prowler --namespace namespace1 namespace2
```


@@ -1,15 +0,0 @@
# Non in-cluster execution
For non in-cluster execution, you can provide the location of the [kubeconfig](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/) file with the following argument:
```console
prowler kubernetes --kubeconfig-file /path/to/kubeconfig
```
???+ note
If no `--kubeconfig-file` is provided, Prowler will use the default KubeConfig file location (`~/.kube/config`).
???+ note
`prowler` will scan the active Kubernetes context by default. Use the [`--context`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/context/) flag to specify the context to be scanned.
???+ note
By default, `prowler` will scan all namespaces in your active Kubernetes context. Use the [`--namespace`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/namespace/) flag to specify the namespace(s) to be scanned.


@@ -10,7 +10,7 @@ Execute Prowler in verbose mode (like in Version 2):
prowler <provider> --verbose
```
## Filter findings by status
-Prowler can filter the findings by their status, so you can see only in the CLI and in the reports the findings with a specific status:
+Prowler can filter the findings by their status:
```console
prowler <provider> --status [PASS, FAIL, MANUAL]
```


@@ -7,147 +7,97 @@ Mutelist option works along with other options and will modify the output in the
- CSV: `muted` is `True`. The field `status` will keep the original status, `MANUAL`, `PASS` or `FAIL`, of the finding.
## How the Mutelist Works
The **Mutelist** uses both "AND" and "OR" logic to determine which resources, checks, regions, and tags should be muted. For each check, the Mutelist evaluates whether the account, region, and resource match the specified criteria using "AND" logic. If tags are specified, the Mutelist can apply either "AND" or "OR" logic.
If any of the criteria do not match, the check is not muted.
???+ note
Remember that mutelist can be used with regular expressions.
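As an illustration of that AND/OR evaluation, here is a minimal, hypothetical Python sketch (this is not Prowler's actual implementation; the `is_muted` name, the rule shape, and the finding fields are assumptions):

```python
import re

def is_muted(finding: dict, rule: dict) -> bool:
    """Sketch of the mutelist evaluation: account, check, region and
    resource are ANDed; tag entries are ANDed unless one entry uses a
    regex alternation (e.g. "project=test|project=stage") for OR."""
    anded = [
        re.search(rule["account"], finding["account"]),
        re.search(rule["check"], finding["check"]),
        re.search(rule["region"], finding["region"]),
        re.search(rule["resource"], finding["resource"]),
    ]
    if not all(anded):
        return False
    # Every tag entry must match ("AND"); alternation inside a single
    # entry matches any of its options ("OR").
    return all(re.search(tag, finding["tags"]) for tag in rule.get("tags", []))

# Hypothetical finding and rule:
finding = {
    "account": "123456789012", "check": "ec2_instance_public_ip",
    "region": "us-east-1", "resource": "test-instance",
    "tags": "project=stage | env=dev",
}
rule = {
    "account": "123456789012", "check": "ec2_.*", "region": ".*",
    "resource": "test", "tags": ["project=test|project=stage"],
}
print(is_muted(finding, rule))  # True
```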
## Mutelist Specification
???+ note
- For Azure provider, the Account ID is the Subscription Name and the Region is the Location.
- For GCP provider, the Account ID is the Project ID and the Region is the Zone.
- For Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.
The Mutelist file uses the [YAML](https://en.wikipedia.org/wiki/YAML) format with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
Accounts:
"123456789012":
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
- "us-east-1"
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test"
- "project=test" # This will mute every resource containing the string "test" and BOTH tags at the same time.
"*":
Regions:
- "*"
Resources:
- "test"
Tags: # This will mute every resource containing the string "test" and the ones that contain EITHER the `test=test` OR `project=test` OR `project=dev`
- "test=test|project=(test|dev)"
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # This will mute every resource containing the string "test" and the tags `test=test` and either `project=test` OR `project=stage` in every account and region.
- "project=test|project=stage"
"*":
Checks:
"s3_bucket_object_versioning":
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
```
### Account, Check, Region, Resource, and Tag
| Field | Description | Logic |
|----------|----------|----------|
| `<account_id>` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `<check_name>` | The name of the Prowler check. Use `*` to apply the mutelist to all checks. | `ANDed` |
| `<region>` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `<resource>` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `<tag>` | The tag value. | `ORed` |
## How to Use the Mutelist
-To use the Mutelist, you need to specify the path to the Mutelist YAML file using the `-w` or `--mutelist-file` option when running Prowler:
+You can use `-w`/`--mutelist-file` with the path of your mutelist yaml file:
```
prowler <provider> -w mutelist.yaml
```
Replace `<provider>` with the appropriate provider name.
## Mutelist YAML File Syntax
## Considerations
???+ note
For Azure provider, the Account ID is the Subscription Name and the Region is the Location.
- The Mutelist can be used in combination with other Prowler options, such as the `--service` or `--checks` option, to further customize the scanning process.
- Make sure to review and update the Mutelist regularly to ensure it reflects the desired exclusions and remains up to date with your infrastructure.
???+ note
For GCP provider, the Account ID is the Project ID and the Region is the Zone.
???+ note
For Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.
The Mutelist file is a YAML file with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
Accounts:
"123456789012":
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
- "us-east-1"
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
"*":
Checks:
"s3_bucket_object_versioning":
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
```
## AWS Mutelist
### Mute specific AWS regions


@@ -12,7 +12,7 @@ prowler <provider> --scan-unused-services
## Services that are ignored
### AWS
#### ACM
-You can have certificates in ACM that are not in use by any AWS resource.
+You can have certificates in ACM that is not in use by any AWS resource.
Prowler will check if every certificate is going to expire soon; if a certificate is not in use, by default it will not be checked whether it is expired, about to expire, or still valid.
- `acm_certificates_expiration_check`


@@ -83,14 +83,9 @@ nav:
- Authentication: tutorials/azure/authentication.md
- Non default clouds: tutorials/azure/use-non-default-cloud.md
- Subscriptions: tutorials/azure/subscriptions.md
- Create Prowler Service Principal: tutorials/azure/create-prowler-service-principal.md
- Google Cloud:
- Authentication: tutorials/gcp/authentication.md
- Projects: tutorials/gcp/projects.md
- Kubernetes:
- In-Cluster Execution: tutorials/kubernetes/in-cluster.md
- Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md
- Miscellaneous: tutorials/kubernetes/misc.md
- Developer Guide:
- Introduction: developer-guide/introduction.md
- Provider: developer-guide/provider.md


@@ -58,29 +58,20 @@ Resources:
- 'account:Get*'
- 'appstream:Describe*'
- 'appstream:List*'
- 'backup:List*'
- 'cloudtrail:GetInsightSelectors'
- 'codeartifact:List*'
- 'codebuild:BatchGet*'
- 'cognito-idp:GetUserPoolMfaConfig'
- 'dlm:Get*'
- 'drs:Describe*'
- 'ds:Get*'
- 'ds:Describe*'
- 'ds:List*'
- 'dynamodb:GetResourcePolicy'
- 'ec2:GetEbsEncryptionByDefault'
- 'ec2:GetSnapshotBlockPublicAccessState'
- 'ec2:GetInstanceMetadataDefaults'
- 'ecr:Describe*'
- 'ecr:GetRegistryScanningConfiguration'
- 'elasticfilesystem:DescribeBackupPolicy'
- 'glue:GetConnections'
- 'glue:GetSecurityConfiguration*'
- 'glue:SearchTables'
- 'lambda:GetFunction*'
- 'logs:FilterLogEvents'
- 'lightsail:GetRelationalDatabases'
- 'macie2:GetMacieSession'
- 's3:GetAccountPublicAccessBlock'
- 'shield:DescribeProtection'
@@ -88,10 +79,8 @@ Resources:
- 'securityhub:BatchImportFindings'
- 'securityhub:GetFindings'
- 'ssm:GetDocument'
- 'ssm-incidents:List*'
- 'support:Describe*'
- 'tag:GetTagKeys'
- 'wellarchitected:List*'
Resource: '*'
- PolicyName: ProwlerScanRoleAdditionalViewPrivilegesApiGateway
PolicyDocument:


@@ -16,10 +16,7 @@
"ds:Get*",
"ds:Describe*",
"ds:List*",
"dynamodb:GetResourcePolicy",
"ec2:GetEbsEncryptionByDefault",
"ec2:GetSnapshotBlockPublicAccessState",
"ec2:GetInstanceMetadataDefaults",
"ecr:Describe*",
"ecr:GetRegistryScanningConfiguration",
"elasticfilesystem:DescribeBackupPolicy",
@@ -28,7 +25,6 @@
"glue:SearchTables",
"lambda:GetFunction*",
"logs:FilterLogEvents",
"lightsail:GetRelationalDatabases",
"macie2:GetMacieSession",
"s3:GetAccountPublicAccessBlock",
"shield:DescribeProtection",


@@ -1,20 +0,0 @@
{
"properties": {
"roleName": "ProwlerRole",
"description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
"assignableScopes": [
"/"
],
"permissions": [
{
"actions": [
"Microsoft.Web/sites/host/listkeys/action",
"Microsoft.Web/sites/config/list/Action"
],
"notActions": [],
"dataActions": [],
"notDataActions": []
}
]
}
}

poetry.lock (generated, 535 lines changed)

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -149,13 +149,13 @@ files = [
[[package]]
name = "anyio"
version = "4.4.0"
version = "4.3.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
{file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
{file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
{file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
{file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
]
[package.dependencies]
@@ -171,13 +171,13 @@ trio = ["trio (>=0.23)"]
[[package]]
name = "astroid"
version = "3.2.4"
version = "3.2.2"
description = "An abstract syntax tree for Python with inference support."
optional = false
python-versions = ">=3.8.0"
files = [
{file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"},
{file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"},
{file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"},
{file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"},
]
[package.dependencies]
@@ -305,13 +305,13 @@ aio = ["aiohttp (>=3.0)"]
[[package]]
name = "azure-identity"
version = "1.17.1"
version = "1.16.1"
description = "Microsoft Azure Identity Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-identity-1.17.1.tar.gz", hash = "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea"},
{file = "azure_identity-1.17.1-py3-none-any.whl", hash = "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382"},
{file = "azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e"},
{file = "azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726"},
]
[package.dependencies]
@@ -319,7 +319,6 @@ azure-core = ">=1.23.0"
cryptography = ">=2.5"
msal = ">=1.24.0"
msal-extensions = ">=0.3.0"
typing-extensions = ">=4.0.0"
[[package]]
name = "azure-keyvault-keys"
@@ -372,37 +371,35 @@ isodate = ">=0.6.1,<1.0.0"
[[package]]
name = "azure-mgmt-compute"
version = "32.0.0"
version = "31.0.0"
description = "Microsoft Azure Compute Management Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-mgmt-compute-32.0.0.tar.gz", hash = "sha256:8d5a86e0116c71a07bcedd8e69d2e09270db3880932656521f3143c6f9475072"},
{file = "azure_mgmt_compute-32.0.0-py3-none-any.whl", hash = "sha256:8578dbeee034a58c41331a71ddd2503e1e5c65a2cc233ebfe9adc5e16ca3d037"},
{file = "azure-mgmt-compute-31.0.0.tar.gz", hash = "sha256:5a5b1c4fc1a19ecb022a12ded1be8b1b155f6979d03fb9efc04642f606644bbf"},
{file = "azure_mgmt_compute-31.0.0-py3-none-any.whl", hash = "sha256:39cad123d814390cca6adbe02afe2269aa179b9051dc022c7f07134ebd416207"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-containerservice"
version = "31.0.0"
version = "30.0.0"
description = "Microsoft Azure Container Service Management Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-mgmt-containerservice-31.0.0.tar.gz", hash = "sha256:134358d7f88c4d29b4009f91d7619861e1fad5dbea5e147402dd61ad96b5624a"},
{file = "azure_mgmt_containerservice-31.0.0-py3-none-any.whl", hash = "sha256:75c6dbbaf27ed6c8251e34b86fefcfb001fe096c69df9ce93c163de71a2b5275"},
{file = "azure-mgmt-containerservice-30.0.0.tar.gz", hash = "sha256:6c62e6ac590e34fedd739fe24b31b3750713a014616696ea8d44c7bcc81c06b7"},
{file = "azure_mgmt_containerservice-30.0.0-py3-none-any.whl", hash = "sha256:795a6a50d6632344910216853167b9bd47b09d50cb2afa28b2a18e58f5088c3f"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-core"
@@ -420,13 +417,13 @@ azure-core = ">=1.26.2,<2.0.0"
[[package]]
name = "azure-mgmt-cosmosdb"
version = "9.5.1"
version = "9.5.0"
description = "Microsoft Azure Cosmos DB Management Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-mgmt-cosmosdb-9.5.1.tar.gz", hash = "sha256:4e55d3973f11cf02cf7a8055aee86615233d7cbe3bb227a07c176bac8bc7ef02"},
{file = "azure_mgmt_cosmosdb-9.5.1-py3-none-any.whl", hash = "sha256:860cad5583d63936e5f9477f7878a256c4dee039169cdea554da851f0762f0c7"},
{file = "azure-mgmt-cosmosdb-9.5.0.tar.gz", hash = "sha256:5d21a197de08b3638e09ad88e32da211f7bb598b7d7cfee48d61d57d69b3edd9"},
{file = "azure_mgmt_cosmosdb-9.5.0-py3-none-any.whl", hash = "sha256:9abdcda0c973c3afe92adf52f9e8d2ede0576e7ade384efd3f4ccd36475ab1a3"},
]
[package.dependencies]
@@ -436,20 +433,19 @@ isodate = ">=0.6.1"
[[package]]
name = "azure-mgmt-keyvault"
version = "10.3.1"
version = "10.3.0"
description = "Microsoft Azure Key Vault Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.7"
files = [
{file = "azure-mgmt-keyvault-10.3.1.tar.gz", hash = "sha256:34b92956aefbdd571cae5a03f7078e037d8087b2c00cfa6748835dc73abb5a30"},
{file = "azure_mgmt_keyvault-10.3.1-py3-none-any.whl", hash = "sha256:a18a27a06551482d31f92bc43ac8b0846af02cd69511f80090865b4c5caa3c21"},
{file = "azure-mgmt-keyvault-10.3.0.tar.gz", hash = "sha256:183b4164cf1868b8ea7efeaa98edad7d2a4e14a9bd977c2818b12b75150cd2a2"},
{file = "azure_mgmt_keyvault-10.3.0-py3-none-any.whl", hash = "sha256:3410cf6c703e9570ed3c8e9716e483c02b1804adde6ab437ddc8feac4545acd6"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.3.2,<2.0.0"
isodate = ">=0.6.1,<1.0.0"
[[package]]
name = "azure-mgmt-monitor"
@@ -469,13 +465,13 @@ isodate = ">=0.6.1,<1.0.0"
[[package]]
name = "azure-mgmt-network"
version = "26.0.0"
version = "25.4.0"
description = "Microsoft Azure Network Management Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-mgmt-network-26.0.0.tar.gz", hash = "sha256:4de676184195053fdb106a6ea1042a894e70c731a6d3c8a633d52f5229f4ee1b"},
{file = "azure_mgmt_network-26.0.0-py3-none-any.whl", hash = "sha256:15d6a77d7429bdcc76f8482158845d42f9d9434529e1ad2b6778269cc14627e9"},
{file = "azure-mgmt-network-25.4.0.tar.gz", hash = "sha256:a338e62d81fdbf050f802143c28cb965b07edd43800ef0504cdfa6b8854d7554"},
{file = "azure_mgmt_network-25.4.0-py3-none-any.whl", hash = "sha256:ae30f9ff25c22e14e0394d432d7aebc06ac1c5bf4de24cf226972c12bd664035"},
]
[package.dependencies]
@@ -549,19 +545,19 @@ msrest = ">=0.6.21"
[[package]]
name = "azure-mgmt-storage"
version = "21.2.1"
version = "21.1.0"
description = "Microsoft Azure Storage Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.7"
files = [
{file = "azure-mgmt-storage-21.2.1.tar.gz", hash = "sha256:503a7ff9c31254092b0656445f5728bfdfda2d09d46a82e97019eaa9a1ecec64"},
{file = "azure_mgmt_storage-21.2.1-py3-none-any.whl", hash = "sha256:f97df1fa39cde9dbacf2cd96c9cba1fc196932185e24853e276f74b18a0bd031"},
{file = "azure-mgmt-storage-21.1.0.tar.gz", hash = "sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162"},
{file = "azure_mgmt_storage-21.1.0-py3-none-any.whl", hash = "sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.3.2,<2.0.0"
isodate = ">=0.6.1,<1.0.0"
[[package]]
name = "azure-mgmt-subscription"
@@ -581,29 +577,29 @@ msrest = ">=0.7.1"
[[package]]
name = "azure-mgmt-web"
version = "7.3.0"
version = "7.2.0"
description = "Microsoft Azure Web Apps Management Client Library for Python"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.7"
files = [
{file = "azure-mgmt-web-7.3.0.tar.gz", hash = "sha256:2032bf4d50df0bbb822ea00cac3bfbe0e67487d2c9cc1182c0858f83149cdea9"},
{file = "azure_mgmt_web-7.3.0-py3-none-any.whl", hash = "sha256:0e53db3fea6eae0d68d9a94eef2e19301f80d3d8a8a20eab599b75384ac0296e"},
{file = "azure-mgmt-web-7.2.0.tar.gz", hash = "sha256:efcfe6f7f520ed0abcfe86517e1c8cf02a712f737a3db0db7cb46c6d647981ed"},
{file = "azure_mgmt_web-7.2.0-py3-none-any.whl", hash = "sha256:28e88602ad9d6d03ed3ba89f966f478b6148e28d292d165e5f371c92c59621df"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.3.2,<2.0.0"
isodate = ">=0.6.1,<1.0.0"
[[package]]
name = "azure-storage-blob"
version = "12.21.0"
version = "12.20.0"
description = "Microsoft Azure Blob Storage Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-storage-blob-12.21.0.tar.gz", hash = "sha256:b9722725072f5b7373c0f4dd6d78fbae2bb37bffc5c3e01731ab8c750ee8dd7e"},
{file = "azure_storage_blob-12.21.0-py3-none-any.whl", hash = "sha256:f9ede187dd5a0ef296b583a7c1861c6938ddd6708d6e70f4203a163c2ab42d43"},
{file = "azure-storage-blob-12.20.0.tar.gz", hash = "sha256:eeb91256e41d4b5b9bad6a87fd0a8ade07dd58aa52344e2c8d2746e27a017d3b"},
{file = "azure_storage_blob-12.20.0-py3-none-any.whl", hash = "sha256:de6b3bf3a90e9341a6bcb96a2ebe981dffff993e9045818f6549afea827a52a9"},
]
[package.dependencies]
@@ -631,13 +627,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "bandit"
version = "1.7.9"
version = "1.7.8"
description = "Security oriented static analyser for python code."
optional = false
python-versions = ">=3.8"
files = [
{file = "bandit-1.7.9-py3-none-any.whl", hash = "sha256:52077cb339000f337fb25f7e045995c4ad01511e716e5daac37014b9752de8ec"},
{file = "bandit-1.7.9.tar.gz", hash = "sha256:7c395a436743018f7be0a4cbb0a4ea9b902b6d87264ddecf8cfdc73b4f78ff61"},
{file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"},
{file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"},
]
[package.dependencies]
@@ -712,17 +708,17 @@ files = [
[[package]]
name = "boto3"
version = "1.34.151"
version = "1.34.123"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "boto3-1.34.151-py3-none-any.whl", hash = "sha256:35bc76faacf1667d3fbb66c1966acf2230ef26206557efc26d9d9d79337bef43"},
{file = "boto3-1.34.151.tar.gz", hash = "sha256:30498a76b6f651ee2af7ae8edc1704379279ab8b91f1a8dd1f4ddf51259b0bc2"},
{file = "boto3-1.34.123-py3-none-any.whl", hash = "sha256:56bec52d485d5670ce96d53ae7b2cd4ae4e8a705fb2298a21093cdd77d642331"},
{file = "boto3-1.34.123.tar.gz", hash = "sha256:42b140fc850cf261ee4b1e8ef527fa071b1f1592a6d6a68d34b29f37cc46b4dd"},
]
[package.dependencies]
botocore = ">=1.34.151,<1.35.0"
botocore = ">=1.34.123,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -731,13 +727,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.34.151"
version = "1.34.123"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
{file = "botocore-1.34.151-py3-none-any.whl", hash = "sha256:9018680d7d4a8060c26d127ceec5ab5b270879f423ea39b863d8a46f3e34c404"},
{file = "botocore-1.34.151.tar.gz", hash = "sha256:0d0968e427a94378f295b49d59170dad539938487ec948de3d030f06092ec6dc"},
{file = "botocore-1.34.123-py3-none-any.whl", hash = "sha256:8c34ada2a708c82e7174bff700611643db7ce2cb18f1130c35045c24310d299d"},
{file = "botocore-1.34.123.tar.gz", hash = "sha256:a8577f6574600c4d159b5cd103ee05744a443d77f7778304e17307940b369c4f"},
]
[package.dependencies]
@@ -764,13 +760,13 @@ files = [
[[package]]
name = "certifi"
version = "2024.7.4"
version = "2024.2.2"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
{file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
{file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
]
[[package]]
@@ -1003,63 +999,63 @@ files = [
[[package]]
name = "coverage"
version = "7.6.0"
version = "7.5.3"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"},
{file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"},
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"},
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"},
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"},
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"},
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"},
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"},
{file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"},
{file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"},
{file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"},
{file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"},
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"},
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"},
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"},
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"},
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"},
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"},
{file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"},
{file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"},
{file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"},
{file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"},
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"},
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"},
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"},
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"},
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"},
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"},
{file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"},
{file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"},
{file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"},
{file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"},
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"},
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"},
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"},
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"},
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"},
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"},
{file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"},
{file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"},
{file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"},
{file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"},
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"},
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"},
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"},
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"},
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"},
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"},
{file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"},
{file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"},
{file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"},
{file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"},
{file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"},
{file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"},
{file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"},
{file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"},
{file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"},
{file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"},
{file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"},
{file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"},
{file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"},
{file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"},
{file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"},
{file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"},
{file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"},
{file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"},
{file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"},
{file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"},
{file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"},
{file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"},
{file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"},
{file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"},
{file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"},
{file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"},
{file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"},
{file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"},
{file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"},
{file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"},
{file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"},
{file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"},
{file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"},
{file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"},
{file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"},
{file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"},
{file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"},
{file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"},
{file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"},
{file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"},
{file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"},
{file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"},
{file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"},
{file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"},
{file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"},
{file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"},
{file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"},
{file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"},
{file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"},
{file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"},
{file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"},
{file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"},
{file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"},
{file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"},
{file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"},
{file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"},
]
[package.dependencies]
@@ -1070,38 +1066,43 @@ toml = ["tomli"]
[[package]]
name = "cryptography"
version = "43.0.0"
version = "42.0.5"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
{file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"},
{file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"},
{file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"},
{file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"},
{file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"},
{file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"},
{file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"},
{file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"},
{file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"},
{file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"},
{file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"},
{file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"},
{file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"},
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"},
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"},
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"},
{file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"},
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"},
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"},
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"},
{file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"},
{file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"},
{file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"},
{file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"},
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"},
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"},
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"},
{file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"},
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"},
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"},
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"},
{file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"},
{file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"},
{file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"},
{file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"},
{file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"},
{file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"},
{file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"},
{file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"},
{file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"},
{file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"},
{file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"},
{file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"},
]
[package.dependencies]
@@ -1114,18 +1115,18 @@ nox = ["nox"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
name = "dash"
version = "2.17.1"
version = "2.17.0"
description = "A Python framework for building reactive web-apps. Developed by Plotly."
optional = false
python-versions = ">=3.8"
files = [
{file = "dash-2.17.1-py3-none-any.whl", hash = "sha256:3eefc9ac67003f93a06bc3e500cae0a6787c48e6c81f6f61514239ae2da414e4"},
{file = "dash-2.17.1.tar.gz", hash = "sha256:ee2d9c319de5dcc1314085710b72cd5fa63ff994d913bf72979b7130daeea28e"},
{file = "dash-2.17.0-py3-none-any.whl", hash = "sha256:2421569023b2cd46ea2d4b2c14fe72c71b7436527a3102219b2265fa361e7c67"},
{file = "dash-2.17.0.tar.gz", hash = "sha256:d065cd88771e45d0485993be0d27565e08918cb7edd18e31ee1c5b41252fc2fa"},
]
[package.dependencies]
@@ -1144,11 +1145,11 @@ Werkzeug = "<3.1"
[package.extras]
celery = ["celery[redis] (>=5.1.2)", "redis (>=3.5.3)"]
ci = ["black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==7.0.0)", "flaky (==3.8.1)", "flask-talisman (==1.0.0)", "jupyterlab (<4.0.0)", "mimesis (<=11.1.0)", "mock (==4.0.3)", "numpy (<=1.26.3)", "openpyxl", "orjson (==3.10.3)", "pandas (>=1.4.0)", "pyarrow", "pylint (==3.0.3)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "pyzmq (==25.1.2)", "xlrd (>=2.0.1)"]
ci = ["black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==7.0.0)", "flaky (==3.8.1)", "flask-talisman (==1.0.0)", "jupyterlab (<4.0.0)", "mimesis (<=11.1.0)", "mock (==4.0.3)", "numpy (<=1.26.3)", "openpyxl", "orjson (==3.9.12)", "pandas (>=1.4.0)", "pyarrow", "pylint (==3.0.3)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "pyzmq (==25.1.2)", "xlrd (>=2.0.1)"]
compress = ["flask-compress"]
dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"]
diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"]
testing = ["beautifulsoup4 (>=4.8.2)", "cryptography", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"]
testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"]
[[package]]
name = "dash-bootstrap-components"
@@ -1359,34 +1360,34 @@ testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]]
name = "filelock"
version = "3.12.4"
version = "3.13.4"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
{file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
{file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
{file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"},
{file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"},
]
[package.extras]
docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
typing = ["typing-extensions (>=4.7.1)"]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "flake8"
version = "7.1.0"
version = "7.0.0"
description = "the modular source code checker: pep8 pyflakes and co"
optional = false
python-versions = ">=3.8.1"
files = [
{file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"},
{file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"},
{file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"},
{file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"},
]
[package.dependencies]
mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.12.0,<2.13.0"
pycodestyle = ">=2.11.0,<2.12.0"
pyflakes = ">=3.2.0,<3.3.0"
[[package]]
@@ -1586,13 +1587,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
[[package]]
name = "google-api-python-client"
version = "2.139.0"
version = "2.132.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "google_api_python_client-2.139.0-py2.py3-none-any.whl", hash = "sha256:1850a92505d91a82e2ca1635ab2b8dff179f4b67082c2651e1db332e8039840c"},
{file = "google_api_python_client-2.139.0.tar.gz", hash = "sha256:ed4bc3abe2c060a87412465b4e8254620bbbc548eefc5388e2c5ff912d36a68b"},
{file = "google-api-python-client-2.132.0.tar.gz", hash = "sha256:d6340dc83b72d72333cee5d50f7dcfecbff66a8783164090e945f985ec4c374d"},
{file = "google_api_python_client-2.132.0-py2.py3-none-any.whl", hash = "sha256:cde87700bd4d37f39f5e940292c1c6cd0910990b5b01f50b1332a8cea38e8595"},
]
[package.dependencies]
@@ -1604,13 +1605,13 @@ uritemplate = ">=3.0.1,<5"
[[package]]
name = "google-auth"
version = "2.30.0"
version = "2.29.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"},
{file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"},
{file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"},
{file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"},
]
[package.dependencies]
@@ -1998,13 +1999,13 @@ files = [
[[package]]
name = "jsonschema"
version = "4.23.0"
version = "4.22.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"},
{file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"},
{file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"},
{file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"},
]
[package.dependencies]
@@ -2015,7 +2016,7 @@ rpds-py = ">=0.7.1"
[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
[[package]]
name = "jsonschema-path"
@@ -2064,13 +2065,13 @@ six = "*"
[[package]]
name = "kubernetes"
version = "30.1.0"
version = "29.0.0"
description = "Kubernetes python client"
optional = false
python-versions = ">=3.6"
files = [
{file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"},
{file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"},
{file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"},
{file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"},
]
[package.dependencies]
@@ -2513,13 +2514,13 @@ test = ["pytest", "pytest-cov"]
[[package]]
name = "moto"
version = "5.0.11"
version = "5.0.9"
description = ""
optional = false
python-versions = ">=3.8"
files = [
{file = "moto-5.0.11-py2.py3-none-any.whl", hash = "sha256:bdba9bec0afcde9f99b58c5271d6458dbfcda0a0a1e9beaecd808d2591db65ea"},
{file = "moto-5.0.11.tar.gz", hash = "sha256:606b641f4c6ef69f28a84147d6d6806d052011e7ae7b0fe46ae8858e7a27a0a3"},
{file = "moto-5.0.9-py2.py3-none-any.whl", hash = "sha256:21a13e02f83d6a18cfcd99949c96abb2e889f4bd51c4c6a3ecc8b78765cb854e"},
{file = "moto-5.0.9.tar.gz", hash = "sha256:eb71f1cba01c70fff1f16086acb24d6d9aeb32830d646d8989f98a29aeae24ba"},
]
[package.dependencies]
@@ -2646,25 +2647,25 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.5.3"
version = "1.4.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.8"
files = [
{file = "msgraph_sdk-1.5.3-py3-none-any.whl", hash = "sha256:47aecbeb3bdb76fcfb4b7535aa5defc336249c42b0d7cbb9480244045449cb12"},
{file = "msgraph_sdk-1.5.3.tar.gz", hash = "sha256:34dd5a9a55287cfc8a3d5e301acdf65e46462011d451f2992fdbf31851a94e8f"},
{file = "msgraph_sdk-1.4.0-py3-none-any.whl", hash = "sha256:24f99082475ea129c3d45e44269bd64a7c6bfef8dda4f8ea692bbc9e47b71b78"},
{file = "msgraph_sdk-1.4.0.tar.gz", hash = "sha256:715907272c240e579d7669a690504488e25ae15fec904e2918c49ca328dc4a14"},
]
[package.dependencies]
azure-identity = ">=1.12.0"
microsoft-kiota-abstractions = ">=1.3.0,<2.0.0"
microsoft-kiota-abstractions = ">=1.0.0,<2.0.0"
microsoft-kiota-authentication-azure = ">=1.0.0,<2.0.0"
microsoft-kiota-http = ">=1.0.0,<2.0.0"
microsoft-kiota-serialization-form = ">=0.1.0"
microsoft-kiota-serialization-json = ">=1.0.0,<2.0.0"
microsoft-kiota-serialization-multipart = ">=0.1.0"
microsoft-kiota-serialization-text = ">=1.0.0,<2.0.0"
msgraph_core = ">=1.0.0"
msgraph-core = ">=1.0.0"
[package.extras]
dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
@@ -2842,56 +2843,47 @@ test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
[[package]]
name = "numpy"
version = "2.0.1"
version = "1.26.4"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.9"
files = [
{file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"},
{file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"},
{file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"},
{file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"},
{file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"},
{file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"},
{file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"},
{file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"},
{file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"},
{file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"},
{file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"},
{file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"},
{file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"},
{file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"},
{file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"},
{file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"},
{file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"},
{file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"},
{file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"},
{file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"},
{file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"},
{file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"},
{file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"},
{file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"},
{file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"},
{file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"},
{file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"},
{file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"},
{file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"},
{file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"},
{file = "numpy-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc085b28d62ff4009364e7ca34b80a9a080cbd97c2c0630bb5f7f770dae9414"},
{file = "numpy-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fae4ebbf95a179c1156fab0b142b74e4ba4204c87bde8d3d8b6f9c34c5825ef"},
{file = "numpy-2.0.1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:72dc22e9ec8f6eaa206deb1b1355eb2e253899d7347f5e2fae5f0af613741d06"},
{file = "numpy-2.0.1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:ec87f5f8aca726117a1c9b7083e7656a9d0d606eec7299cc067bb83d26f16e0c"},
{file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f682ea61a88479d9498bf2091fdcd722b090724b08b31d63e022adc063bad59"},
{file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8efc84f01c1cd7e34b3fb310183e72fcdf55293ee736d679b6d35b35d80bba26"},
{file = "numpy-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3fdabe3e2a52bc4eff8dc7a5044342f8bd9f11ef0934fcd3289a788c0eb10018"},
{file = "numpy-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:24a0e1befbfa14615b49ba9659d3d8818a0f4d8a1c5822af8696706fbda7310c"},
{file = "numpy-2.0.1-cp39-cp39-win32.whl", hash = "sha256:f9cf5ea551aec449206954b075db819f52adc1638d46a6738253a712d553c7b4"},
{file = "numpy-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:e9e81fa9017eaa416c056e5d9e71be93d05e2c3c2ab308d23307a8bc4443c368"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61728fba1e464f789b11deb78a57805c70b2ed02343560456190d0501ba37b0f"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:12f5d865d60fb9734e60a60f1d5afa6d962d8d4467c120a1c0cda6eb2964437d"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eacf3291e263d5a67d8c1a581a8ebbcfd6447204ef58828caf69a5e3e8c75990"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2c3a346ae20cfd80b6cfd3e60dc179963ef2ea58da5ec074fd3d9e7a1e7ba97f"},
{file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"},
{file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
{file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
{file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"},
{file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"},
{file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"},
{file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"},
{file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"},
{file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"},
{file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"},
{file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"},
{file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"},
{file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"},
{file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"},
{file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"},
{file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"},
{file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"},
{file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"},
{file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"},
{file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"},
{file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"},
{file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"},
{file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"},
{file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"},
{file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"},
{file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"},
{file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"},
{file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"},
{file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"},
{file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"},
{file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"},
{file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"},
{file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"},
{file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"},
{file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"},
{file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"},
{file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
]
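
Each entry above pairs an artifact with its SHA-256 digest, which installers recompute before trusting a download. A minimal sketch of that verification, using the numpy-1.26.4 sdist digest from the list above (the local file path is illustrative):

```
import hashlib

def sha256_of(path: str) -> str:
    # Stream the file so large wheels don't have to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"
assert sha256_of("numpy-1.26.4.tar.gz") == expected, "artifact does not match the lock file"
```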
[[package]]
@@ -3380,13 +3372,13 @@ pyasn1 = ">=0.4.6,<0.7.0"
[[package]]
name = "pycodestyle"
version = "2.12.0"
version = "2.11.1"
description = "Python style guide checker"
optional = false
python-versions = ">=3.8"
files = [
{file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"},
{file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"},
{file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"},
{file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"},
]
[[package]]
@@ -3500,17 +3492,17 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pylint"
version = "3.2.6"
version = "3.2.3"
description = "python code static checker"
optional = false
python-versions = ">=3.8.0"
files = [
{file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"},
{file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"},
{file = "pylint-3.2.3-py3-none-any.whl", hash = "sha256:b3d7d2708a3e04b4679e02d99e72329a8b7ee8afb8d04110682278781f889fa8"},
{file = "pylint-3.2.3.tar.gz", hash = "sha256:02f6c562b215582386068d52a30f520d84fdbcf2a95fc7e855b816060d048b60"},
]
[package.dependencies]
astroid = ">=3.2.4,<=3.3.0-dev0"
astroid = ">=3.2.2,<=3.3.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
@@ -3562,13 +3554,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "8.3.2"
version = "8.2.2"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
{file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
{file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
{file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
]
[package.dependencies]
@@ -3576,7 +3568,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2"
pluggy = ">=1.5,<2.0"
tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
@@ -3894,13 +3886,13 @@ files = [
[[package]]
name = "requests"
version = "2.32.3"
version = "2.32.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
{file = "requests-2.32.0-py3-none-any.whl", hash = "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5"},
{file = "requests-2.32.0.tar.gz", hash = "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8"},
]
[package.dependencies]
@@ -4228,20 +4220,19 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
[[package]]
name = "safety"
version = "3.2.4"
version = "3.2.3"
description = "Checks installed dependencies for known vulnerabilities and licenses."
optional = false
python-versions = ">=3.7"
files = [
{file = "safety-3.2.4-py3-none-any.whl", hash = "sha256:242ff7ae448d7fb2ea455c90f44e3f2ca45be9c8559b2fe9dfc89617164a0f17"},
{file = "safety-3.2.4.tar.gz", hash = "sha256:bac0202016d736a2118057964a0e3983fa20ff2563fd103cac3f3ac1ed3fea11"},
{file = "safety-3.2.3-py3-none-any.whl", hash = "sha256:cda1e91749f610337a18b7f21f78267c127e44ebbbbcbbd419c83284279a5024"},
{file = "safety-3.2.3.tar.gz", hash = "sha256:414154934f1727daf8a6473493944fecb380540c3f00875dc1ae377382f7d83f"},
]
[package.dependencies]
Authlib = ">=1.2.0"
Click = ">=8.0.2"
dparse = ">=0.6.4b0"
filelock = ">=3.12.2,<3.13.0"
jinja2 = ">=3.1.0"
marshmallow = ">=3.15.0"
packaging = ">=21.0"
@@ -4306,18 +4297,19 @@ files = [
[[package]]
name = "setuptools"
version = "70.0.0"
version = "69.5.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"},
{file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"},
{file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"},
{file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "shellingham"
@@ -4361,13 +4353,13 @@ files = [
[[package]]
name = "slack-sdk"
version = "3.31.0"
version = "3.28.0"
description = "The Slack API Platform SDK for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "slack_sdk-3.31.0-py2.py3-none-any.whl", hash = "sha256:a120cc461e8ebb7d9175f171dbe0ded37a6878d9f7b96b28e4bad1227399047b"},
{file = "slack_sdk-3.31.0.tar.gz", hash = "sha256:740d2f9c49cbfcbd46fca56b4be9d527934c225312aac18fd2c0fca0ef6bc935"},
{file = "slack_sdk-3.28.0-py2.py3-none-any.whl", hash = "sha256:1a47700ae20566575ce494d1d1b6f594b011d06aad28e3b8e28c052cad1d6c4c"},
{file = "slack_sdk-3.28.0.tar.gz", hash = "sha256:e6ece5cb70850492637e002e3b0d26d307939f4a33203b88cb274f7475c9a144"},
]
[package.extras]
@@ -4574,13 +4566,13 @@ files = [
[[package]]
name = "urllib3"
version = "1.26.19"
version = "1.26.18"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
{file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
{file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
{file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
{file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
]
[package.extras]
@@ -4588,6 +4580,23 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "urllib3"
version = "2.2.1"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
{file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
{file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
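
Two urllib3 entries can coexist in the same lock file because their python-versions constraints carve up the interpreter range: 1.26.x covers legacy interpreters while 2.2.x requires 3.8+. A small sketch with the packaging library showing which entry applies where (the interpreter versions are examples):

```
from packaging.specifiers import SpecifierSet
from packaging.version import Version

legacy = SpecifierSet("!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7")
modern = SpecifierSet(">=3.8")

for interpreter in ("2.7", "3.6", "3.12"):
    v = Version(interpreter)
    print(interpreter, v in legacy, v in modern)
# 2.7 and 3.6 match only the legacy entry; 3.12 matches both, and the
# resolver then picks per the environment markers recorded by Poetry.
```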
[[package]]
name = "vulture"
version = "2.11"
@@ -4882,20 +4891,20 @@ multidict = ">=4.0"
[[package]]
name = "zipp"
version = "3.19.1"
version = "3.18.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
{file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"},
{file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"},
{file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
{file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
]
[package.extras]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.13"
content-hash = "324ea427d651cea1513f4a7be9b86f420eb75efcd4e54e7021835e517cd81525"
content-hash = "6e50f55942ded1ce410d4119d25807608dd602b3c28c53b28602b42682841a59"

View File

@@ -6,13 +6,7 @@ from os import environ
from colorama import Fore, Style
from prowler.config.config import (
csv_file_suffix,
get_available_compliance_frameworks,
html_file_suffix,
json_asff_file_suffix,
json_ocsf_file_suffix,
)
from prowler.config.config import get_available_compliance_frameworks
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
bulk_load_checks_metadata,
@@ -42,32 +36,19 @@ from prowler.lib.check.custom_checks_metadata import (
)
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
AWSWellArchitected,
)
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
AzureMitreAttack,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF
from prowler.lib.outputs.html.html import add_html_footer, fill_html_overview_statistics
from prowler.lib.outputs.json.json import close_json
from prowler.lib.outputs.outputs import extract_findings_statistics
from prowler.lib.outputs.slack.slack import Slack
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket
from prowler.providers.aws.lib.security_hub.security_hub import (
batch_send_to_security_hub,
prepare_security_hub_findings,
resolve_security_hub_previous_findings,
verify_security_hub_integration_enabled_per_region,
)
from prowler.providers.common.provider import Provider
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
@@ -199,17 +180,7 @@ def prowler():
# Import custom checks from folder
if checks_folder:
custom_checks = parse_checks_from_folder(global_provider, checks_folder)
# Workaround to be able to execute custom checks alongside all checks if nothing is explicitly set
if (
not checks_file
and not checks
and not services
and not severities
and not compliance_framework
and not categories
):
checks_to_execute.update(custom_checks)
parse_checks_from_folder(global_provider, checks_folder)
# Exclude checks if -e/--excluded-checks
if excluded_checks:
@@ -299,321 +270,97 @@ def prowler():
)
sys.exit(1)
# Outputs
# TODO: this part is needed since the checks generate a Check_Report_XXX and the outputs use Finding
# This will be refactored so that the outputs generate the Finding directly
finding_outputs = [
Finding.generate_output(global_provider, finding) for finding in findings
]
generated_outputs = {"regular": [], "compliance": []}
if args.output_formats:
for mode in args.output_formats:
filename = (
f"{global_provider.output_options.output_directory}/"
f"{global_provider.output_options.output_filename}"
)
if mode == "csv":
csv_output = CSV(
findings=finding_outputs,
create_file_descriptor=True,
file_path=f"{filename}{csv_file_suffix}",
)
generated_outputs["regular"].append(csv_output)
# Write CSV Finding Object to file
csv_output.batch_write_data_to_file()
if mode == "json-asff":
asff_output = ASFF(
findings=finding_outputs,
create_file_descriptor=True,
file_path=f"{filename}{json_asff_file_suffix}",
)
generated_outputs["regular"].append(asff_output)
# Write ASFF Finding Object to file
asff_output.batch_write_data_to_file()
if mode == "json-ocsf":
json_output = OCSF(
findings=finding_outputs,
create_file_descriptor=True,
file_path=f"{filename}{json_ocsf_file_suffix}",
)
generated_outputs["regular"].append(json_output)
json_output.batch_write_data_to_file()
if mode == "html":
html_output = HTML(
findings=finding_outputs,
create_file_descriptor=True,
file_path=f"{filename}{html_file_suffix}",
)
generated_outputs["regular"].append(html_output)
html_output.batch_write_data_to_file(
provider=global_provider, stats=stats
# Close the JSON file if it exists
if "json" in mode:
close_json(
global_provider.output_options.output_filename,
global_provider.output_options.output_directory,
mode,
)
# Compliance Frameworks
input_compliance_frameworks = set(
global_provider.output_options.output_modes
).intersection(get_available_compliance_frameworks(provider))
if provider == "aws":
for compliance_name in input_compliance_frameworks:
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
if "html" in mode:
add_html_footer(
global_provider.output_options.output_filename,
global_provider.output_options.output_directory,
)
cis = AWSCIS(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
fill_html_overview_statistics(
stats,
global_provider.output_options.output_filename,
global_provider.output_options.output_directory,
)
generated_outputs["compliance"].append(cis)
cis.batch_write_data_to_file()
elif compliance_name == "mitre_attack_aws":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = AWSMitreAttack(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(mitre_attack)
mitre_attack.batch_write_data_to_file()
elif compliance_name.startswith("ens_"):
# Generate ENS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
ens = AWSENS(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(ens)
ens.batch_write_data_to_file()
elif compliance_name.startswith("aws_well_architected_framework"):
# Generate AWS Well-Architected Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
aws_well_architected = AWSWellArchitected(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(aws_well_architected)
aws_well_architected.batch_write_data_to_file()
elif compliance_name.startswith("iso27001_"):
# Generate ISO27001 Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
iso27001 = AWSISO27001(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(iso27001)
iso27001.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(generic_compliance)
generic_compliance.batch_write_data_to_file()
elif provider == "azure":
for compliance_name in input_compliance_frameworks:
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
# Send output to S3 if needed (-B / -D)
if provider == "aws" and (
args.output_bucket or args.output_bucket_no_assume
):
output_bucket = args.output_bucket
bucket_session = global_provider.session.current_session
# Check if -D was passed
if args.output_bucket_no_assume:
output_bucket = args.output_bucket_no_assume
bucket_session = global_provider.session.original_session
send_to_s3_bucket(
global_provider.output_options.output_filename,
args.output_directory,
mode,
output_bucket,
bucket_session,
)
cis = AzureCIS(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(cis)
cis.batch_write_data_to_file()
elif compliance_name == "mitre_attack_azure":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = AzureMitreAttack(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(mitre_attack)
mitre_attack.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(generic_compliance)
generic_compliance.batch_write_data_to_file()
elif provider == "gcp":
for compliance_name in input_compliance_frameworks:
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
cis = GCPCIS(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(cis)
cis.batch_write_data_to_file()
elif compliance_name == "mitre_attack_gcp":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = GCPMitreAttack(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(mitre_attack)
mitre_attack.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(generic_compliance)
generic_compliance.batch_write_data_to_file()
elif provider == "kubernetes":
for compliance_name in input_compliance_frameworks:
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
cis = KubernetesCIS(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(cis)
cis.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(generic_compliance)
generic_compliance.batch_write_data_to_file()
# AWS Security Hub Integration
if provider == "aws":
# Send output to S3 if needed (-B / -D) for all the output formats
if args.output_bucket or args.output_bucket_no_assume:
output_bucket = args.output_bucket
bucket_session = global_provider.session.current_session
# Check if -D was passed
if args.output_bucket_no_assume:
output_bucket = args.output_bucket_no_assume
bucket_session = global_provider.session.original_session
s3 = S3(
session=bucket_session,
bucket_name=output_bucket,
output_directory=args.output_directory,
)
s3.send_to_bucket(generated_outputs)
if args.security_hub:
if provider == "aws" and args.security_hub:
print(
f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}"
)
# Verify where AWS Security Hub is enabled
aws_security_enabled_regions = []
security_hub_regions = (
global_provider.get_available_aws_service_regions("securityhub")
if not global_provider.identity.audited_regions
else global_provider.identity.audited_regions
)
for region in security_hub_regions:
# Save the regions where AWS Security Hub is enabled
if verify_security_hub_integration_enabled_per_region(
global_provider.identity.partition,
region,
global_provider.session.current_session,
global_provider.identity.account,
):
aws_security_enabled_regions.append(region)
# Prepare the findings to be sent to Security Hub
security_hub_findings_per_region = prepare_security_hub_findings(
findings,
global_provider,
global_provider.output_options,
aws_security_enabled_regions,
)
# Send the findings to Security Hub
findings_sent_to_security_hub = batch_send_to_security_hub(
security_hub_findings_per_region, global_provider.session.current_session
)
print(
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}"
)
# Resolve previous fails of Security Hub
if not args.skip_sh_update:
print(
f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}"
f"{Style.BRIGHT}\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}"
)
security_hub_regions = (
global_provider.get_available_aws_service_regions("securityhub")
if not global_provider.identity.audited_regions
else global_provider.identity.audited_regions
findings_archived_in_security_hub = resolve_security_hub_previous_findings(
security_hub_findings_per_region,
global_provider,
)
security_hub = SecurityHub(
aws_account_id=global_provider.identity.account,
aws_partition=global_provider.identity.partition,
aws_session=global_provider.session.current_session,
findings=asff_output.data,
send_only_fails=global_provider.output_options.send_sh_only_fails,
aws_security_hub_available_regions=security_hub_regions,
)
# Send the findings to Security Hub
findings_sent_to_security_hub = security_hub.batch_send_to_security_hub()
print(
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}"
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}"
)
# Resolve previous fails of Security Hub
if not args.skip_sh_update:
print(
f"{Style.BRIGHT}\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}"
)
findings_archived_in_security_hub = (
security_hub.archive_previous_findings()
)
print(
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}"
)
# Display summary table
if not args.only_logs:
display_summary_table(
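
Note on the hunks above: the revert swaps the class-based S3 and SecurityHub writers back to the older function helpers, and the restored send_to_s3_bucket takes the output name, directory, mode, bucket, and boto3 session positionally, exactly as shown. A call sketch with illustrative values (the names and paths below are placeholders, not values from this diff):

```
send_to_s3_bucket(
    "prowler-output",        # output filename (illustrative)
    "/tmp/prowler-output",   # output directory (illustrative)
    "csv",                   # output mode
    "my-findings-bucket",    # destination bucket (illustrative)
    bucket_session,          # boto3 session chosen by -B / -D
)
```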

View File

@@ -3044,7 +3044,7 @@
"Id": "9.4",
"Description": "Ensure that Register with Entra ID is enabled on App Service",
"Checks": [
""
"app_client_certificates_on"
],
"Attributes": [
{
@@ -3066,7 +3066,7 @@
"Id": "9.5",
"Description": "Ensure That 'PHP version' is the Latest, If Used to Run the Web App",
"Checks": [
"app_ensure_php_version_is_latest"
"app_register_with_identity"
],
"Attributes": [
{
@@ -3088,7 +3088,7 @@
"Id": "9.6",
"Description": "Ensure that 'Python version' is the Latest Stable Version, if Used to Run the Web App",
"Checks": [
"app_ensure_python_version_is_latest"
"app_ensure_php_version_is_latest"
],
"Attributes": [
{
@@ -3110,7 +3110,7 @@
"Id": "9.7",
"Description": "Ensure that 'Java version' is the latest, if used to run the Web App",
"Checks": [
"app_ensure_java_version_is_latest"
"app_ensure_python_version_is_latest"
],
"Attributes": [
{
@@ -3132,7 +3132,7 @@
"Id": "9.8",
"Description": "Ensure that 'HTTP Version' is the Latest, if Used to Run the Web App",
"Checks": [
"app_ensure_using_http20"
"app_ensure_java_version_is_latest"
],
"Attributes": [
{
@@ -3154,7 +3154,7 @@
"Id": "9.9",
"Description": "Ensure FTP deployments are Disabled",
"Checks": [
"app_ftp_deployment_disabled"
"app_ensure_using_http20"
],
"Attributes": [
{
@@ -3176,7 +3176,7 @@
"Id": "9.10",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [
""
"app_ftp_deployment_disabled"
],
"Attributes": [
{
@@ -3213,6 +3213,66 @@
"References": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://learn.microsoft.com/en-us/security/benchmark/azure/mcsb-asset-management#am-4-limit-access-to-asset-management"
}
]
},
{
"Id": "9.10",
"Description": "Ensure FTP deployments are Disabled",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 1",
"AssessmentStatus": "Automated",
"Description": "By default, Azure Functions, Web, and API Services can be deployed over FTP. If FTP is required for an essential deployment workflow, FTPS should be required for FTP login for all App Service Apps and Functions.",
"RationaleStatement": "Azure FTP deployment endpoints are public. An attacker listening to traffic on a wifi network used by a remote employee or a corporate network could see login traffic in clear-text which would then grant them full control of the code base of the app or service. This finding is more severe if User Credentials for deployment are set at the subscription level rather than using the default Application Credentials which are unique per App.",
"ImpactStatement": "Any deployment workflows that rely on FTP or FTPs rather than the WebDeploy or HTTPs endpoints may be affected.",
"RemediationProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should be set to `Disabled` or `FTPS Only` **From Azure CLI** For each out of compliance application, run the following choosing either 'disabled' or 'FtpsOnly' as appropriate: ``` az webapp config set --resource-group <resource group name> --name <app name> --ftps-state [disabled|FtpsOnly] ``` **From PowerShell** For each out of compliance application, run the following: ``` Set-AzWebApp -ResourceGroupName <resource group name> -Name <app name> -FtpsState <Disabled or FtpsOnly> ```",
"AuditProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should not be set to `All allowed` **From Azure CLI** List webapps to obtain the ids. ``` az webapp list ``` List the publish profiles to obtain the username, password and ftp server url. ``` az webapp deployment list-publishing-profiles --ids <ids> { publishUrl: <URL_FOR_WEB_APP>, userName: <USER_NAME>, userPWD: <USER_PASSWORD>, } ``` **From PowerShell** List all Web Apps: ``` Get-AzWebApp ``` For each app: ``` Get-AzWebApp -ResourceGroupName <resource group name> -Name <app name> | Select-Object -ExpandProperty SiteConfig ``` In the output, look for the value of **FtpsState**. If its value is **AllAllowed** the setting is out of compliance. Any other value is considered in compliance with this check.",
"AdditionalInformation": "",
"DefaultValue": "[Azure Web Service Deploy via FTP](https://docs.microsoft.com/en-us/azure/app-service/deploy-ftp):[Azure Web Service Deployment](https://docs.microsoft.com/en-us/azure/app-service/overview-security):https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-data-protection#dp-4-encrypt-sensitive-information-in-transit:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-posture-vulnerability-management#pv-7-rapidly-and-automatically-remediate-software-vulnerabilities",
"References": "TA0008, T1570, M1031"
}
]
},
{
"Id": "9.11",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Azure Key Vault will store multiple types of sensitive information such as encryption keys, certificate thumbprints, and Managed Identity Credentials. Access to these 'Secrets' can be controlled through granular permissions.",
"RationaleStatement": "The credentials given to an application have permissions to create, delete, or modify data stored within the systems they access. If these credentials are stored within the application itself, anyone with access to the application or a copy of the code has access to them. Storing within Azure Key Vault as secrets increases security by controlling access. This also allows for updates of the credentials without redeploying the entire application.",
"ImpactStatement": "Integrating references to secrets within the key vault are required to be specifically integrated within the application code. This will require additional configuration to be made during the writing of an application, or refactoring of an already written one. There are also additional costs that are charged per 10000 requests to the Key Vault.",
"RemediationProcedure": "Remediation has 2 steps 1. Setup the Key Vault 2. Setup the App Service to use the Key Vault **Step 1: Set up the Key Vault** **From Azure CLI** ``` az keyvault create --name <name> --resource-group <myResourceGroup> --location myLocation ``` **From Powershell** ``` New-AzKeyvault -name <name> -ResourceGroupName <myResourceGroup> -Location <myLocation> ``` **Step 2: Set up the App Service to use the Key Vault** Sample JSON Template for App Service Configuration: ``` { //... resources: [ { type: Microsoft.Storage/storageAccounts, name: [variables('storageAccountName')], //... }, { type: Microsoft.Insights/components, name: [variables('appInsightsName')], //... }, { type: Microsoft.Web/sites, name: [variables('functionAppName')], identity: { type: SystemAssigned }, //... resources: [ { type: config, name: appsettings, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('storageConnectionStringName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('appInsightsKeyName'))] ], properties: { AzureWebJobsStorage: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], WEBSITE_CONTENTAZUREFILECONNECTIONSTRING: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], APPINSIGHTS_INSTRUMENTATIONKEY: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('appInsightsKeyResourceId')).secretUriWithVersion, ')')], WEBSITE_ENABLE_SYNC_UPDATE_SITE: true //... } }, { type: sourcecontrols, name: web, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.Web/sites/config', variables('functionAppName'), 'appsettings')] ], } ] }, { type: Microsoft.KeyVault/vaults, name: [variables('keyVaultName')], //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))] ], properties: { //... accessPolicies: [ { tenantId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').tenantId], objectId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').principalId], permissions: { secrets: [ get ] } } ] }, resources: [ { type: secrets, name: [variables('storageConnectionStringName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName'))] ], properties: { value: [concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';AccountKey=', listKeys(variables('storageAccountResourceId'),'2015-05-01-preview').key1)] } }, { type: secrets, name: [variables('appInsightsKeyName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Insights/components', variables('appInsightsName'))] ], properties: { value: [reference(resourceId('microsoft.insights/components/', variables('appInsightsName')), '2015-05-01').InstrumentationKey] } } ] } ] } ```",
"AuditProcedure": "**From Azure Portal** 1. Login to Azure Portal 2. In the expandable menu on the left go to `Key Vaults` 3. View the Key Vaults listed. **From Azure CLI** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list the secrets within these key vaults run the following command: ``` Get-AzKeyVaultSecret [-VaultName] <vault name> ``` **From Powershell** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list all secrets in a key vault run the following command: ``` Get-AzKeyVaultSecret -VaultName '<vaultName' ```",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/app-service/app-service-key-vault-references:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-identity-management#im-2-manage-application-identities-securely-and-automatically:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest",
"References": "TA0006, T1552, M1041"
}
]
},
{
"Id": "10.1",
"Description": "Ensure that Resource Locks are set for Mission-Critical Azure Resources",
"Checks": [],
"Attributes": [
{
"Section": "10. Miscellaneous",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Resource Manager Locks provide a way for administrators to lock down Azure resources to prevent deletion of, or modifications to, a resource. These locks sit outside of the Role Based Access Controls (RBAC) hierarchy and, when applied, will place restrictions on the resource for all users. These locks are very useful when there is an important resource in a subscription that users should not be able to delete or change. Locks can help prevent accidental and malicious changes or deletion.",
"RationaleStatement": "As an administrator, it may be necessary to lock a subscription, resource group, or resource to prevent other users in the organization from accidentally deleting or modifying critical resources. The lock level can be set to to `CanNotDelete` or `ReadOnly` to achieve this purpose. - `CanNotDelete` means authorized users can still read and modify a resource, but they cannot delete the resource. - `ReadOnly` means authorized users can read a resource, but they cannot delete or update the resource. Applying this lock is similar to restricting all authorized users to the permissions granted by the Reader role.",
"ImpactStatement": "There can be unintended outcomes of locking a resource. Applying a lock to a parent service will cause it to be inherited by all resources within. Conversely, applying a lock to a resource may not apply to connected storage, leaving it unlocked. Please see the documentation for further information.",
"RemediationProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. For each mission critical resource, click on `Locks` 3. Click `Add` 4. Give the lock a name and a description, then select the type, `Read-only` or `Delete` as appropriate 5. Click OK **From Azure CLI** To lock a resource, provide the name of the resource, its resource type, and its resource group name. ``` az lock create --name <LockName> --lock-type <CanNotDelete/Read-only> --resource-group <resourceGroupName> --resource-name <resourceName> --resource-type <resourceType> ``` **From Powershell** ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> -Locktype <CanNotDelete/Read-only> ```",
"AuditProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. Click on `Locks` 3. Ensure the lock is defined with name and description, with type `Read-only` or `Delete` as appropriate. **From Azure CLI** Review the list of all locks set currently: ``` az lock list --resource-group <resourcegroupname> --resource-name <resourcename> --namespace <Namespace> --resource-type <type> --parent ``` **From Powershell** Run the following command to list all resources. ``` Get-AzResource ``` For each resource, run the following command to check for Resource Locks. ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> ``` Review the output of the `Properties` setting. Compliant settings will have the `CanNotDelete` or `ReadOnly` value.",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-asset-management#am-4-limit-access-to-asset-management",
"References": ""
}
]
}
]
}
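
The requirements appended above (9.10, 9.11, and 10.1) ship with empty Checks lists, i.e. manual controls. A quick sketch to enumerate such manual controls in a compliance file like this one (the file path and the top-level Requirements key are assumptions based on the compliance model shown later in this diff):

```
import json

with open("cis_2.0_azure.json") as f:   # illustrative path
    framework = json.load(f)

for requirement in framework["Requirements"]:
    if not requirement["Checks"]:
        print(requirement["Id"], "-", requirement["Description"])
```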

View File

@@ -1288,7 +1288,7 @@
],
"Attributes": [
{
"Section": "2 Etcd",
"Section": "2 etcd",
"Profile": "Level 1 - Master Node",
"AssessmentStatus": "Automated",
"Description": "Configure TLS encryption for the etcd service.",
@@ -1310,7 +1310,7 @@
],
"Attributes": [
{
"Section": "2 Etcd",
"Section": "2 etcd",
"Profile": "Level 1 - Master Node",
"AssessmentStatus": "Automated",
"Description": "Enable client authentication on etcd service.",
@@ -1332,7 +1332,7 @@
],
"Attributes": [
{
"Section": "2 Etcd",
"Section": "2 etcd",
"Profile": "Level 1 - Master Node",
"AssessmentStatus": "Automated",
"Description": "Do not use self-signed certificates for TLS.",
@@ -1354,7 +1354,7 @@
],
"Attributes": [
{
"Section": "2 Etcd",
"Section": "2 etcd",
"Profile": "Level 1 - Master Node",
"AssessmentStatus": "Automated",
"Description": "etcd should be configured to make use of TLS encryption for peer connections.",
@@ -1376,7 +1376,7 @@
],
"Attributes": [
{
"Section": "2 Etcd",
"Section": "2 etcd",
"Profile": "Level 1 - Master Node",
"AssessmentStatus": "Automated",
"Description": "etcd should be configured for peer authentication.",
@@ -1398,7 +1398,7 @@
],
"Attributes": [
{
"Section": "2 Etcd",
"Section": "2 etcd",
"Profile": "Level 1 - Master Node",
"AssessmentStatus": "Automated",
"Description": "Do not use automatically generated self-signed certificates for TLS connections between peers.",
@@ -1420,7 +1420,7 @@
],
"Attributes": [
{
"Section": "2 Etcd",
"Section": "2 etcd",
"Profile": "Level 2 - Master Node",
"AssessmentStatus": "Manual",
"Description": "Use a different certificate authority for etcd from the one used for Kubernetes.",
@@ -2634,7 +2634,7 @@
],
"Attributes": [
{
"Section": "5.4 Secrets Management",
"Section": "5.4",
"Profile": "Level 2 - Master Node",
"AssessmentStatus": "Manual",
"Description": "Kubernetes supports mounting secrets as data volumes or as environment variables. Minimize the use of environment variable secrets.",

View File

@@ -5,13 +5,12 @@ from os import getcwd
import requests
import yaml
from packaging import version
from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "4.3.5"
prowler_version = "4.2.4"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -65,7 +64,6 @@ default_config_file_path = (
default_fixer_config_file_path = (
f"{pathlib.Path(os.path.dirname(os.path.realpath(__file__)))}/fixer_config.yaml"
)
encoding_format_utf_8 = "utf-8"
available_output_formats = ["csv", "json-asff", "json-ocsf", "html"]
@@ -87,7 +85,7 @@ def check_current_version():
"https://api.github.com/repos/prowler-cloud/prowler/tags", timeout=1
)
latest_version = release_response.json()[0]["name"]
if version.parse(latest_version) > version.parse(prowler_version):
if latest_version != prowler_version:
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
else:
return (
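
The hunk above replaces a semantic version comparison with plain string inequality, which reports an upgrade whenever the strings differ, including when the local build is ahead of the latest published tag. A short sketch of the difference, using the two versions from this diff:

```
from packaging import version

local, latest = "4.3.5", "4.2.4"
print(latest != local)                                # True: string check flags an "upgrade"
print(version.parse(latest) > version.parse(local))   # False: the local build is newer
```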
@@ -111,7 +109,7 @@ def load_and_validate_config_file(provider: str, config_file_path: str) -> dict:
dict: The configuration dictionary for the specified provider.
"""
try:
with open(config_file_path, "r", encoding=encoding_format_utf_8) as f:
with open(config_file_path, "r", encoding="utf-8") as f:
config_file = yaml.safe_load(f)
# To avoid introducing a breaking change, allow the old-format config file without any provider keys
@@ -158,9 +156,12 @@ def load_and_validate_fixer_config_file(
Returns:
dict: The fixer configuration dictionary for the specified provider.
Raises:
SystemExit: If there is an error reading or parsing the fixer configuration file.
"""
try:
with open(fixer_config_file_path, "r", encoding=encoding_format_utf_8) as f:
with open(fixer_config_file_path, "r", encoding="utf-8") as f:
fixer_config_file = yaml.safe_load(f)
return fixer_config_file.get(provider, {})

View File

@@ -272,18 +272,6 @@ aws:
# aws.acm_certificates_expiration_check
days_to_expire_threshold: 7
# AWS EKS Configuration
# aws.eks_control_plane_logging_all_types_enabled
# EKS control plane logging types that must be enabled
eks_required_log_types:
[
"api",
"audit",
"authenticator",
"controllerManager",
"scheduler",
]
# Azure Configuration
azure:
# Azure Network Configuration
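
If a check still looks up the removed eks_required_log_types key, it has to supply its own default. A standalone sketch of that defensive read (the config path and accessor are illustrative; the exact lookup inside Prowler is not shown in this hunk):

```
import yaml

with open("config.yaml", encoding="utf-8") as f:      # illustrative path
    aws_config = yaml.safe_load(f).get("aws", {})

required_log_types = aws_config.get(
    "eks_required_log_types",
    ["api", "audit", "authenticator", "controllerManager", "scheduler"],
)
```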

View File

@@ -19,6 +19,7 @@ from prowler.lib.check.compliance_models import load_compliance_framework
from prowler.lib.check.custom_checks_metadata import update_check_metadata
from prowler.lib.check.models import Check, load_check_metadata
from prowler.lib.logger import logger
from prowler.lib.mutelist.mutelist import mutelist_findings
from prowler.lib.outputs.outputs import report
from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes
from prowler.providers.common.models import Audit_Metadata
@@ -125,10 +126,9 @@ def parse_checks_from_file(input_file: str, provider: str) -> set:
# Load checks from custom folder
def parse_checks_from_folder(provider, input_folder: str) -> set:
# TODO: move the AWS-specific code into the provider
def parse_checks_from_folder(provider, input_folder: str) -> int:
try:
custom_checks = set()
imported_checks = 0
# Check if the input folder is an S3 URI
if provider.type == "aws" and re.search(
"^s3://([^/]+)/(.*?([^/]+))/$", input_folder
@@ -156,8 +156,8 @@ def parse_checks_from_folder(provider, input_folder: str) -> set:
if os.path.exists(prowler_module):
shutil.rmtree(prowler_module)
shutil.copytree(check_module, prowler_module)
custom_checks.add(check.name)
return custom_checks
imported_checks += 1
return imported_checks
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
@@ -611,9 +611,9 @@ def execute_checks(
else:
# Prepare your messages
messages = [f"Config File: {Fore.YELLOW}{config_file}{Style.RESET_ALL}"]
if global_provider.mutelist.mutelist_file_path:
if global_provider.mutelist_file_path:
messages.append(
f"Mutelist File: {Fore.YELLOW}{global_provider.mutelist.mutelist_file_path}{Style.RESET_ALL}"
f"Mutelist File: {Fore.YELLOW}{global_provider.mutelist_file_path}{Style.RESET_ALL}"
)
if global_provider.type == "aws":
messages.append(
@@ -706,14 +706,6 @@ def execute(
check_class, verbose, global_provider.output_options.only_logs
)
# Exclude findings per status
if global_provider.output_options.status:
check_findings = [
finding
for finding in check_findings
if finding.status in global_provider.output_options.status
]
# Update Audit Status
services_executed.add(service)
checks_executed.add(check_name)
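
The hunk above removes the per-status filtering of check findings. As a standalone sketch of what that filter did (the finding objects are stubbed):

```
from types import SimpleNamespace

findings = [SimpleNamespace(status="PASS"), SimpleNamespace(status="FAIL")]
status = ["FAIL"]   # e.g. from --status
if status:
    findings = [f for f in findings if f.status in status]
print([f.status for f in findings])   # ['FAIL']
```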
@@ -722,21 +714,11 @@ def execute(
)
# Mutelist findings
if hasattr(global_provider, "mutelist") and global_provider.mutelist.mutelist:
# TODO: make this prettier
is_finding_muted_args = {}
if global_provider.type == "aws":
is_finding_muted_args["aws_account_id"] = (
global_provider.identity.account
)
elif global_provider.type == "kubernetes":
is_finding_muted_args["cluster"] = global_provider.identity.cluster
for finding in check_findings:
is_finding_muted_args["finding"] = finding
finding.muted = global_provider.mutelist.is_finding_muted(
**is_finding_muted_args
)
if hasattr(global_provider, "mutelist") and global_provider.mutelist:
check_findings = mutelist_findings(
global_provider,
check_findings,
)
# Refactor(Outputs)
# Report the check's findings

View File

@@ -1,21 +1,16 @@
import sys
from prowler.lib.check.compliance_models import ComplianceBaseModel
from pydantic import parse_obj_as
from prowler.lib.check.compliance_models import Compliance_Base_Model
from prowler.lib.check.models import Check_Metadata_Model
from prowler.lib.logger import logger
def update_checks_metadata_with_compliance(
bulk_compliance_frameworks: dict, bulk_checks_metadata: dict
) -> dict:
"""
Update the check metadata model with the compliance framework
Args:
bulk_compliance_frameworks (dict): The compliance frameworks
bulk_checks_metadata (dict): The checks metadata
Returns:
dict: The checks metadata with the compliance frameworks
"""
):
"""Update the check metadata model with the compliance framework"""
try:
for check in bulk_checks_metadata:
check_compliance = []
@@ -27,7 +22,7 @@ def update_checks_metadata_with_compliance(
# Include the requirement into the check's framework requirements
compliance_requirements.append(requirement)
# Create the Compliance_Model
compliance = ComplianceBaseModel(
compliance = Compliance_Base_Model(
Framework=framework.Framework,
Provider=framework.Provider,
Version=framework.Version,
@@ -38,6 +33,53 @@ def update_checks_metadata_with_compliance(
check_compliance.append(compliance)
# Save it into the check's metadata
bulk_checks_metadata[check].Compliance = check_compliance
# Add requirements of Manual Controls
for framework in bulk_compliance_frameworks.values():
for requirement in framework.Requirements:
compliance_requirements = []
# Verify if requirement is Manual
if not requirement.Checks:
compliance_requirements.append(requirement)
# Create the Compliance_Model
compliance = Compliance_Base_Model(
Framework=framework.Framework,
Provider=framework.Provider,
Version=framework.Version,
Description=framework.Description,
Requirements=compliance_requirements,
)
# Include the compliance framework for the check
check_compliance.append(compliance)
# Create metadata for Manual Control
manual_check_metadata = {
"Provider": framework.Provider.lower(),
"CheckID": "manual_check",
"CheckTitle": "Manual Check",
"CheckType": [],
"ServiceName": "",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "low",
"ResourceType": "",
"Description": "",
"Risk": "",
"RelatedUrl": "",
"Remediation": {
"Code": {"CLI": "", "NativeIaC": "", "Other": "", "Terraform": ""},
"Recommendation": {"Text": "", "Url": ""},
},
"Categories": [],
"Tags": {},
"DependsOn": [],
"RelatedTo": [],
"Notes": "",
}
manual_check = parse_obj_as(Check_Metadata_Model, manual_check_metadata)
# Save it into the check's metadata
bulk_checks_metadata["manual_check"] = manual_check
bulk_checks_metadata["manual_check"].Compliance = check_compliance
return bulk_checks_metadata
except Exception as e:
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
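
The restored code builds the synthetic manual_check by validating a plain dict with pydantic's parse_obj_as. A self-contained sketch of that call on a toy model (pydantic v1 API, matching this lock file's era):

```
from pydantic import BaseModel, parse_obj_as

class Requirement(BaseModel):
    Id: str
    Description: str

req = parse_obj_as(
    Requirement,
    {"Id": "9.10", "Description": "Ensure FTP deployments are Disabled"},
)
print(req.Id, "-", req.Description)
```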

View File

@@ -91,6 +91,7 @@ class CIS_Requirement_Attribute(BaseModel):
AdditionalInformation: str
DefaultValue: Optional[str]
References: str
DefaultValue: Optional[str]
# Well Architected Requirement Attribute
@@ -188,8 +189,8 @@ class Compliance_Requirement(BaseModel):
Checks: list[str]
class ComplianceBaseModel(BaseModel):
"""ComplianceBaseModel holds the base model for every compliance framework"""
class Compliance_Base_Model(BaseModel):
"""Compliance_Base_Model holds the base model for every compliance framework"""
Framework: str
Provider: str
@@ -217,10 +218,10 @@ class ComplianceBaseModel(BaseModel):
# Testing Pending
def load_compliance_framework(
compliance_specification_file: str,
) -> ComplianceBaseModel:
) -> Compliance_Base_Model:
"""load_compliance_framework loads and parse a Compliance Framework Specification"""
try:
compliance_framework = ComplianceBaseModel.parse_file(
compliance_framework = Compliance_Base_Model.parse_file(
compliance_specification_file
)
except ValidationError as error:

View File

@@ -96,8 +96,6 @@ class Check(ABC, Check_Metadata_Model):
data = Check_Metadata_Model.parse_file(metadata_file).dict()
# Calls parents init function
super().__init__(**data)
# TODO: verify that the CheckID is the same as the filename and classname
# to mimic the test done at test_<provider>_checks_metadata_is_valid
def metadata(self) -> dict:
"""Return the JSON representation of the check's metadata"""

View File

@@ -263,7 +263,7 @@ Detailed documentation at https://docs.prowler.com
group.add_argument(
"--compliance",
nargs="+",
help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: cis_3.0_aws)",
help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: ens_rd2022_aws)",
choices=available_compliance_frameworks,
)
group.add_argument(

View File

@@ -1,342 +1,373 @@
import re
from abc import ABC, abstractmethod
from typing import Any
import yaml
from prowler.lib.logger import logger
from prowler.lib.mutelist.models import mutelist_schema
from prowler.lib.outputs.utils import unroll_tags
# --- Removed by this revert: class-based Mutelist ---
class Mutelist(ABC):
    """
    Abstract base class for managing a mutelist.

    Attributes:
        _mutelist (dict): Dictionary containing information about muted checks for different accounts.
        _mutelist_file_path (str): Path to the mutelist file.
        MUTELIST_KEY (str): Key used to access the mutelist in the mutelist file.

    Methods:
        __init__: Initializes a Mutelist object.
        mutelist: Property that returns the mutelist dictionary.
        mutelist_file_path: Property that returns the mutelist file path.
        is_finding_muted: Abstract method to check if a finding is muted.
        get_mutelist_file_from_local_file: Retrieves the mutelist file from a local file.
        validate_mutelist: Validates the mutelist against a schema.
        is_muted: Checks if a finding is muted for the audited account, check, region, resource, and tags.
        is_muted_in_check: Checks if a check is muted.
        is_excepted: Checks if the account, region, resource, and tags are excepted based on the exceptions.
    """

    _mutelist: dict = {}
    _mutelist_file_path: str = None
    MUTELIST_KEY = "Mutelist"

    def __init__(
        self, mutelist_path: str = "", mutelist_content: dict = {}
    ) -> "Mutelist":
        if mutelist_path:
            self._mutelist_file_path = mutelist_path
            self.get_mutelist_file_from_local_file(mutelist_path)
        else:
            self._mutelist = mutelist_content
        if self._mutelist:
            self.validate_mutelist()

    @property
    def mutelist(self) -> dict:
        return self._mutelist

    @property
    def mutelist_file_path(self) -> dict:
        return self._mutelist_file_path

    @abstractmethod
    def is_finding_muted(self) -> bool:
        raise NotImplementedError

    def get_mutelist_file_from_local_file(self, mutelist_path: str):
        try:
            with open(mutelist_path) as f:
                self._mutelist = yaml.safe_load(f)[self.MUTELIST_KEY]
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )

    def validate_mutelist(self) -> bool:
        try:
            self._mutelist = mutelist_schema.validate(self._mutelist)
            return True
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- Mutelist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
            )
            self._mutelist = {}
            return False

    def is_muted(
        self,
        audited_account: str,
        check: str,
        finding_region: str,
        finding_resource: str,
        finding_tags,
    ) -> bool:
        """
        Check if the provided finding is muted for the audited account, check, region, resource and tags.

        The Mutelist works in a way that each field is ANDed, so if a check is muted for an account, region, resource and tags, it will be muted.
        The exceptions are ORed, so if a check is excepted for an account, region, resource or tags, it will not be muted.
        The only particularity is the tags, which are ORed.

        So, for the following Mutelist:
        ```
        Mutelist:
            Accounts:
                '*':
                    Checks:
                        ec2_instance_detailed_monitoring_enabled:
                            Regions: ['*']
                            Resources:
                                - 'i-123456789'
                            Tags:
                                - 'Name=AdminInstance | Environment=Prod'
        ```
        The check `ec2_instance_detailed_monitoring_enabled` will be muted for all accounts and regions and for the resource_id 'i-123456789' with at least one of the tags 'Name=AdminInstance' or 'Environment=Prod'.

        Args:
            audited_account (str): The account being audited.
            check (str): The check to be evaluated for muting.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags: The tags associated with the finding.

        Returns:
            bool: True if the finding is muted for the audited account, check, region, resource and tags, otherwise False.
        """
        try:
            # By default is not muted
            is_finding_muted = False
            # We always check all the accounts present in the mutelist
            # if one mutes the finding we set the finding as muted
            for account in self._mutelist.get("Accounts", []):
                if account == audited_account or account == "*":
                    if self.is_muted_in_check(
                        self._mutelist["Accounts"][account]["Checks"],
                        audited_account,
                        check,
                        finding_region,
                        finding_resource,
                        finding_tags,
                    ):
                        is_finding_muted = True
                        break
            return is_finding_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False

    def is_muted_in_check(
        self,
        muted_checks,
        audited_account,
        check,
        finding_region,
        finding_resource,
        finding_tags,
    ) -> bool:
        """
        Check if the provided check is muted.

        Args:
            muted_checks (dict): Dictionary containing information about muted checks.
            audited_account (str): The account to be audited.
            check (str): The check to be evaluated for muting.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags (str): The tags associated with the finding.

        Returns:
            bool: True if the check is muted, otherwise False.
        """
        try:
            # Default value is not muted
            is_check_muted = False
            for muted_check, muted_check_info in muted_checks.items():
                # map lambda to awslambda
                muted_check = re.sub("^lambda", "awslambda", muted_check)
                check_match = (
                    "*" == muted_check
                    or check == muted_check
                    or self.is_item_matched([muted_check], check)
                )
                # Check if the finding is excepted
                exceptions = muted_check_info.get("Exceptions")
                if (
                    self.is_excepted(
                        exceptions,
                        audited_account,
                        finding_region,
                        finding_resource,
                        finding_tags,
                    )
                    and check_match
                ):
                    # Break loop and return default value since is excepted
                    break
                muted_regions = muted_check_info.get("Regions")
                muted_resources = muted_check_info.get("Resources")
                muted_tags = muted_check_info.get("Tags", "*")
                # Normalize muted_tags when it is None, "" or [] (falsy values default to "*")
                if not muted_tags:
                    muted_tags = "*"
                # If there is a *, it affects all checks
                if check_match:
                    muted_in_check = True
                    muted_in_region = self.is_item_matched(
                        muted_regions, finding_region
                    )
                    muted_in_resource = self.is_item_matched(
                        muted_resources, finding_resource
                    )
                    muted_in_tags = self.is_item_matched(
                        muted_tags, finding_tags, tag=True
                    )
                    # For a finding to be muted requires the following set to True:
                    # - muted_in_check -> True
                    # - muted_in_region -> True
                    # - muted_in_tags -> True
                    # - muted_in_resource -> True
                    # - excepted -> False
                    if (
                        muted_in_check
                        and muted_in_region
                        and muted_in_tags
                        and muted_in_resource
                    ):
                        is_check_muted = True
            return is_check_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False

    def is_excepted(
        self,
        exceptions,
        audited_account,
        finding_region,
        finding_resource,
        finding_tags,
    ) -> bool:
        """
        Check if the provided account, region, resource, and tags are excepted based on the exceptions dictionary.

        Args:
            exceptions (dict): Dictionary containing exceptions for different attributes like Accounts, Regions, Resources, and Tags.
            audited_account (str): The account to be audited.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags (str): The tags associated with the finding.

        Returns:
            bool: True if the account, region, resource, and tags are excepted based on the exceptions, otherwise False.
        """
        try:
            excepted = False
            is_account_excepted = False
            is_region_excepted = False
            is_resource_excepted = False
            is_tag_excepted = False
            if exceptions:
                excepted_accounts = exceptions.get("Accounts", [])
                is_account_excepted = self.is_item_matched(
                    excepted_accounts, audited_account
                )
                excepted_regions = exceptions.get("Regions", [])
                is_region_excepted = self.is_item_matched(
                    excepted_regions, finding_region
                )
                excepted_resources = exceptions.get("Resources", [])
                is_resource_excepted = self.is_item_matched(
                    excepted_resources, finding_resource
                )
                excepted_tags = exceptions.get("Tags", [])
                is_tag_excepted = self.is_item_matched(
                    excepted_tags, finding_tags, tag=True
                )
                if (
                    not is_account_excepted
                    and not is_region_excepted
                    and not is_resource_excepted
                    and not is_tag_excepted
                ):
                    excepted = False
                elif (
                    (is_account_excepted or not excepted_accounts)
                    and (is_region_excepted or not excepted_regions)
                    and (is_resource_excepted or not excepted_resources)
                    and (is_tag_excepted or not excepted_tags)
                ):
                    excepted = True
            return excepted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False

    @staticmethod
    def is_item_matched(matched_items, finding_items, tag=False) -> bool:
        """
        Check if any of the items in matched_items are present in finding_items.

        Args:
            matched_items (list): List of items to be matched.
            finding_items (str): String to search for matched items.

        Returns:
            bool: True if any of the matched_items are present in finding_items, otherwise False.
        """
        try:
            is_item_matched = False
            if matched_items and (finding_items or finding_items == ""):
                if tag:
                    # Reconstructed line: with tag=True every muted tag must match,
                    # so the flag starts True and is cleared on the first miss
                    is_item_matched = True
                for item in matched_items:
                    if item.startswith("*"):
                        item = ".*" + item[1:]
                    if tag:
                        if not re.search(item, finding_items):
                            is_item_matched = False
                            break
                    else:
                        if re.search(item, finding_items):
                            is_item_matched = True
                            break
            return is_item_matched
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False


# --- Restored by this revert: module-level mutelist helpers ---
def get_mutelist_file_from_local_file(mutelist_path: str):
    try:
        with open(mutelist_path) as f:
            mutelist = yaml.safe_load(f)["Mutelist"]
        return mutelist
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return {}


def validate_mutelist(mutelist: dict) -> dict:
    try:
        mutelist = mutelist_schema.validate(mutelist)
        return mutelist
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- Mutelist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
        )
        return {}


def mutelist_findings(
    global_provider: Any,
    check_findings: list[Any],
) -> list[Any]:
    # Check if finding is muted
    for finding in check_findings:
        # TODO: Move this mapping to the execute_check function and pass that output to the mutelist and the report
        if global_provider.type == "aws":
            finding.muted = is_muted(
                global_provider.mutelist,
                global_provider.identity.account,
                finding.check_metadata.CheckID,
                finding.region,
                finding.resource_id,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "azure":
            finding.muted = is_muted(
                global_provider.mutelist,
                finding.subscription,
                finding.check_metadata.CheckID,
                # TODO: add region to the findings when we add Azure Locations
                # finding.region,
                "",
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "gcp":
            finding.muted = is_muted(
                global_provider.mutelist,
                finding.project_id,
                finding.check_metadata.CheckID,
                finding.location,
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "kubernetes":
            finding.muted = is_muted(
                global_provider.mutelist,
                global_provider.identity.cluster,
                finding.check_metadata.CheckID,
                finding.namespace,
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
    return check_findings


def is_muted(
    mutelist: dict,
    audited_account: str,
    check: str,
    finding_region: str,
    finding_resource: str,
    finding_tags,
) -> bool:
    """
    Check if the provided finding is muted for the audited account, check, region, resource and tags.

    Args:
        mutelist (dict): Dictionary containing information about muted checks for different accounts.
        audited_account (str): The account being audited.
        check (str): The check to be evaluated for muting.
        finding_region (str): The region where the finding occurred.
        finding_resource (str): The resource related to the finding.
        finding_tags: The tags associated with the finding.

    Returns:
        bool: True if the finding is muted for the audited account, check, region, resource and tags, otherwise False.
    """
    try:
        # By default is not muted
        is_finding_muted = False
        # We always check all the accounts present in the mutelist
        # if one mutes the finding we set the finding as muted
        for account in mutelist["Accounts"]:
            if account == audited_account or account == "*":
                if is_muted_in_check(
                    mutelist["Accounts"][account]["Checks"],
                    audited_account,
                    check,
                    finding_region,
                    finding_resource,
                    finding_tags,
                ):
                    is_finding_muted = True
                    break
        return is_finding_muted
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_muted_in_check(
    muted_checks,
    audited_account,
    check,
    finding_region,
    finding_resource,
    finding_tags,
) -> bool:
    """
    Check if the provided check is muted.

    Args:
        muted_checks (dict): Dictionary containing information about muted checks.
        audited_account (str): The account to be audited.
        check (str): The check to be evaluated for muting.
        finding_region (str): The region where the finding occurred.
        finding_resource (str): The resource related to the finding.
        finding_tags (str): The tags associated with the finding.

    Returns:
        bool: True if the check is muted, otherwise False.
    """
    try:
        # Default value is not muted
        is_check_muted = False
        for muted_check, muted_check_info in muted_checks.items():
            # map lambda to awslambda
            muted_check = re.sub("^lambda", "awslambda", muted_check)
            check_match = (
                "*" == muted_check
                or check == muted_check
                or re.search(muted_check, check)
            )
            # Check if the finding is excepted
            exceptions = muted_check_info.get("Exceptions")
            if (
                is_excepted(
                    exceptions,
                    audited_account,
                    finding_region,
                    finding_resource,
                    finding_tags,
                )
                and check_match
            ):
                # Break loop and return default value since is excepted
                break
            muted_regions = muted_check_info.get("Regions")
            muted_resources = muted_check_info.get("Resources")
            muted_tags = muted_check_info.get("Tags", "*")
            # Normalize muted_tags when it is None, "" or [] (falsy values default to "*")
            if not muted_tags:
                muted_tags = "*"
            # If there is a *, it affects all checks
            if check_match:
                muted_in_check = True
                muted_in_region = is_muted_in_region(muted_regions, finding_region)
                muted_in_resource = is_muted_in_resource(
                    muted_resources, finding_resource
                )
                muted_in_tags = is_muted_in_tags(muted_tags, finding_tags)
                # For a finding to be muted requires the following set to True:
                # - muted_in_check -> True
                # - muted_in_region -> True
                # - muted_in_tags -> True
                # - muted_in_resource -> True
                # - excepted -> False
                if (
                    muted_in_check
                    and muted_in_region
                    and muted_in_tags
                    and muted_in_resource
                ):
                    is_check_muted = True
        return is_check_muted
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_muted_in_region(
    mutelist_regions,
    finding_region,
) -> bool:
    """
    Check if the finding_region is present in the mutelist_regions.

    Args:
        mutelist_regions (list): List of regions in the mute list.
        finding_region (str): Region to check if it is muted.

    Returns:
        bool: True if the finding_region is muted in any of the mutelist_regions, otherwise False.
    """
    try:
        return __is_item_matched__(mutelist_regions, finding_region)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_muted_in_tags(muted_tags, finding_tags) -> bool:
    """
    Check if any of the muted tags are present in the finding tags.

    Args:
        muted_tags (list): List of muted tags to be checked.
        finding_tags (str): String containing tags to search for muted tags.

    Returns:
        bool: True if any of the muted tags are present in the finding tags, otherwise False.
    """
    try:
        return __is_item_matched__(muted_tags, finding_tags)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_muted_in_resource(muted_resources, finding_resource) -> bool:
    """
    Check if any of the muted_resources are present in the finding_resource.

    Args:
        muted_resources (list): List of muted resources to be checked.
        finding_resource (str): Resource to search for muted resources.

    Returns:
        bool: True if any of the muted_resources are present in the finding_resource, otherwise False.
    """
    try:
        return __is_item_matched__(muted_resources, finding_resource)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_excepted(
    exceptions,
    audited_account,
    finding_region,
    finding_resource,
    finding_tags,
) -> bool:
    """
    Check if the provided account, region, resource, and tags are excepted based on the exceptions dictionary.

    Args:
        exceptions (dict): Dictionary containing exceptions for different attributes like Accounts, Regions, Resources, and Tags.
        audited_account (str): The account to be audited.
        finding_region (str): The region where the finding occurred.
        finding_resource (str): The resource related to the finding.
        finding_tags (str): The tags associated with the finding.

    Returns:
        bool: True if the account, region, resource, and tags are excepted based on the exceptions, otherwise False.
    """
    try:
        excepted = False
        is_account_excepted = False
        is_region_excepted = False
        is_resource_excepted = False
        is_tag_excepted = False
        if exceptions:
            excepted_accounts = exceptions.get("Accounts", [])
            is_account_excepted = __is_item_matched__(
                excepted_accounts, audited_account
            )
            excepted_regions = exceptions.get("Regions", [])
            is_region_excepted = __is_item_matched__(excepted_regions, finding_region)
            excepted_resources = exceptions.get("Resources", [])
            is_resource_excepted = __is_item_matched__(
                excepted_resources, finding_resource
            )
            excepted_tags = exceptions.get("Tags", [])
            is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)
            if (
                not is_account_excepted
                and not is_region_excepted
                and not is_resource_excepted
                and not is_tag_excepted
            ):
                excepted = False
            elif (
                (is_account_excepted or not excepted_accounts)
                and (is_region_excepted or not excepted_regions)
                and (is_resource_excepted or not excepted_resources)
                and (is_tag_excepted or not excepted_tags)
            ):
                excepted = True
        return excepted
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def __is_item_matched__(matched_items, finding_items):
    """
    Check if any of the items in matched_items are present in finding_items.

    Args:
        matched_items (list): List of items to be matched.
        finding_items (str): String to search for matched items.

    Returns:
        bool: True if any of the matched_items are present in finding_items, otherwise False.
    """
    try:
        is_item_matched = False
        if matched_items and (finding_items or finding_items == ""):
            for item in matched_items:
                if item.startswith("*"):
                    item = ".*" + item[1:]
                if re.search(item, finding_items):
                    is_item_matched = True
                    break
        return is_item_matched
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False
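Assuming the restored helpers above are importable, this sketch (all values invented) illustrates the matching semantics: every field is ANDed, and an absent Tags entry defaults to the wildcard.

sample_mutelist = {
    "Accounts": {
        "*": {  # any account
            "Checks": {
                "ec2_instance_detailed_monitoring_enabled": {
                    "Regions": ["eu-west-1"],
                    "Resources": ["i-123456789"],
                }
            }
        }
    }
}
# Check, region and resource all match -> muted
assert is_muted(
    sample_mutelist,
    "123456789012",
    "ec2_instance_detailed_monitoring_enabled",
    "eu-west-1",
    "i-123456789",
    "",
)
# Different region -> not muted, since every field must match
assert not is_muted(
    sample_mutelist,
    "123456789012",
    "ec2_instance_detailed_monitoring_enabled",
    "us-east-1",
    "i-123456789",
    "",
)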

View File

@@ -1,401 +0,0 @@
from json import dump
from os import SEEK_SET
from typing import Optional
from pydantic import BaseModel, validator
from prowler.config.config import prowler_version, timestamp_utc
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
from prowler.lib.utils.utils import hash_sha512
class ASFF(Output):
"""
ASFF class represents a transformation of findings into AWS Security Finding Format (ASFF).
This class provides methods to transform a list of findings into the ASFF format required by AWS Security Hub. It includes operations such as generating unique identifiers, formatting timestamps, handling compliance frameworks, and ensuring the status values match the allowed values in ASFF.
Attributes:
- _data: A list to store the transformed findings.
- _file_descriptor: A file descriptor to write to file.
Methods:
- transform(findings: list[Finding]) -> None: Transforms a list of findings into ASFF format.
- batch_write_data_to_file() -> None: Writes the findings data to a file in JSON ASFF format.
- generate_status(status: str, muted: bool = False) -> str: Generates the ASFF status based on the provided status and muted flag.
References:
- AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
- AWS Security Finding Format Syntax: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
"""
def transform(self, findings: list[Finding]) -> None:
"""
Transforms a list of findings into AWS Security Finding Format (ASFF).
This method iterates over the list of findings provided as input and transforms each finding into the ASFF format required by AWS Security Hub. It performs several operations for each finding, including generating unique identifiers, formatting timestamps, handling compliance frameworks, and ensuring the status values match the allowed values in ASFF.
Parameters:
- findings (list[Finding]): A list of Finding objects representing the findings to be transformed.
Returns:
- None
Notes:
- The method skips findings with a status of "MANUAL" as it is not valid in SecurityHub.
- It generates unique identifiers for each finding based on specific attributes.
- It formats timestamps in the required ASFF format.
- It handles compliance frameworks and associated standards for each finding.
- It ensures that the finding status matches the allowed values in ASFF.
References:
- AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
- AWS Security Finding Format Syntax: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
"""
try:
for finding in findings:
# MANUAL status is not valid in SecurityHub
# https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
if finding.status == "MANUAL":
continue
timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
associated_standards, compliance_summary = ASFF.format_compliance(
finding.compliance
)
# Ensures finding_status matches allowed values in ASFF
finding_status = ASFF.generate_status(finding.status, finding.muted)
self._data.append(
AWSSecurityFindingFormat(
# The following line cannot be changed because it is the format we use to generate unique findings for AWS Security Hub
# If changed some findings could be lost because the unique identifier will be different
Id=f"prowler-{finding.check_id}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
ProductArn=f"arn:{finding.partition}:securityhub:{finding.region}::product/prowler/prowler",
ProductFields=ProductFields(
ProwlerResourceName=finding.resource_uid,
),
GeneratorId="prowler-" + finding.check_id,
AwsAccountId=finding.account_uid,
Types=(
finding.check_type.split(",")
if finding.check_type
else ["Software and Configuration Checks"]
),
FirstObservedAt=timestamp,
UpdatedAt=timestamp,
CreatedAt=timestamp,
Severity=Severity(Label=finding.severity.value),
Title=finding.check_title,
Description=finding.description,
Resources=[
Resource(
Id=finding.resource_uid,
Type=finding.resource_type,
Partition=finding.partition,
Region=finding.region,
Tags=finding.resource_tags,
)
],
Compliance=Compliance(
Status=finding_status,
AssociatedStandards=associated_standards,
RelatedRequirements=compliance_summary,
),
Remediation=Remediation(
Recommendation=Recommendation(
Text=finding.remediation_recommendation_text,
Url=finding.remediation_recommendation_url,
)
),
)
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def batch_write_data_to_file(self) -> None:
"""
Writes the findings data to a file in JSON ASFF format.
This method iterates over the findings data stored in the '_data' attribute and writes it to the file descriptor '_file_descriptor' in JSON format. It starts by writing the JSON opening/header '[', then iterates over each finding, dumping it to the file with an indent of 4 spaces. After writing all findings, it writes the closing ']' to complete the JSON array structure. Finally, it closes the file descriptor.
Returns:
None
"""
try:
if (
getattr(self, "_file_descriptor", None)
and not self._file_descriptor.closed
and self._data
):
# Write JSON opening/header [
self._file_descriptor.write("[")
# Write findings
for finding in self._data:
dump(
finding.dict(exclude_none=True),
self._file_descriptor,
indent=4,
)
self._file_descriptor.write(",")
# Write footer/closing ]
if self._file_descriptor.tell() > 0:
if self._file_descriptor.tell() != 1:
self._file_descriptor.seek(
self._file_descriptor.tell() - 1, SEEK_SET
)
self._file_descriptor.truncate()
self._file_descriptor.write("]")
# Close file descriptor
self._file_descriptor.close()
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
@staticmethod
def generate_status(status: str, muted: bool = False) -> str:
"""
Generates the ASFF status based on the provided status and muted flag.
Parameters:
- status (str): The status of the finding.
- muted (bool): Flag indicating if the finding is muted.
Returns:
- str: The ASFF status corresponding to the provided status and muted flag.
References:
- AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
"""
json_asff_status = ""
if muted:
# Per AWS Security Hub "MUTED" is not a valid status
# https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
json_asff_status = "WARNING"
else:
if status == "PASS":
json_asff_status = "PASSED"
elif status == "FAIL":
json_asff_status = "FAILED"
else:
# MANUAL is set to NOT_AVAILABLE
json_asff_status = "NOT_AVAILABLE"
return json_asff_status
@staticmethod
def format_compliance(compliance: dict) -> tuple[list[dict], list[str]]:
"""
Transforms a dictionary of compliance data into a tuple of associated standards and compliance summaries.
Parameters:
- compliance (dict): A dictionary containing compliance data where keys are standards and values are lists of compliance details.
Returns:
- tuple[list[dict], list[str]]: A tuple containing a list of associated standards (each as a dictionary with 'StandardsId') and a list of compliance summaries.
Notes:
- The method limits the number of associated standards to 20.
- Each compliance summary is a concatenation of the standard key and its associated compliance details.
- If the concatenated summary exceeds 64 characters, it is truncated to 63 characters.
Example:
format_compliance({"standard1": ["detail1", "detail2"], "standard2": ["detail3"]}) -> ([{"StandardsId": "standard1"}, {"StandardsId": "standard2"}], ["standard1 detail1 detail2", "standard2 detail3"])
"""
compliance_summary = []
associated_standards = []
for key, value in compliance.items():
if (
len(associated_standards) < 20
): # AssociatedStandards should NOT have more than 20 items
associated_standards.append({"StandardsId": key})
item = f"{key} {' '.join(value)}"
if len(item) > 64:
item = item[0:63]
compliance_summary.append(item)
return associated_standards, compliance_summary
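generate_status and format_compliance are pure static helpers, so their behavior can be illustrated standalone; a sketch against the removed class above (compliance key and requirement ids invented):

assert ASFF.generate_status("PASS") == "PASSED"
assert ASFF.generate_status("FAIL") == "FAILED"
assert ASFF.generate_status("FAIL", muted=True) == "WARNING"  # muted overrides the status
associated_standards, compliance_summary = ASFF.format_compliance(
    {"cis_1.5_aws": ["1.1", "1.2"]}
)
assert associated_standards == [{"StandardsId": "cis_1.5_aws"}]
assert compliance_summary == ["cis_1.5_aws 1.1 1.2"]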
class ProductFields(BaseModel):
"""
Class representing the Product Fields of a finding in the AWS Security Finding Format.
Attributes:
- ProviderName (str): The name of the provider, default value is "Prowler".
- ProviderVersion (str): The version of the provider, fetched from the prowler_version in config.py.
- ProwlerResourceName (str): The name of the Prowler resource.
"""
ProviderName: str = "Prowler"
ProviderVersion: str = prowler_version
ProwlerResourceName: str
class Severity(BaseModel):
"""
Class representing the severity of a finding in the AWS Security Finding Format.
Attributes:
- Label (str): A string representing the severity label of the finding.
This class is used to define the severity level of a finding in the AWS Security Finding Format.
"""
Label: str
@validator("Label", pre=True, always=True)
def severity_uppercase(severity):
return severity.upper()
class Resource(BaseModel):
"""
Class representing a resource in the AWS Security Finding Format.
Attributes:
- Type (str): The type of the resource.
- Id (str): The unique identifier of the resource.
- Partition (str): The partition where the resource resides.
- Region (str): The region where the resource is located.
- Tags (Optional[dict]): Optional dictionary of tags associated with the resource.
This class defines the structure of a resource within the AWS Security Finding Format. It includes attributes to specify the type, unique identifier, partition, region, and optional tags of the resource.
"""
Type: str
Id: str
Partition: str
Region: str
Tags: Optional[dict]
@validator("Tags", pre=True, always=True)
def tags_cannot_be_empty_dict(tags):
if not tags:
return None
return tags
class Compliance(BaseModel):
"""
Class representing the compliance details of a finding in the AWS Security Finding Format.
Attributes:
- Status (str): The compliance status of the finding.
- RelatedRequirements (list[str]): A list of related compliance requirements for the finding.
- AssociatedStandards (list[dict]): A list of associated standards with the finding, where each item is a dictionary containing the 'StandardsId'.
This class defines the structure of compliance information within the AWS Security Finding Format. It includes attributes to specify the compliance status, related requirements, and associated standards of a finding.
"""
Status: str
RelatedRequirements: list[str]
AssociatedStandards: list[dict]
@validator("Status", pre=True, always=True)
def status(status):
if status not in ["PASSED", "WARNING", "FAILED", "NOT_AVAILABLE"]:
raise ValueError("must contain a space")
return status
class Recommendation(BaseModel):
"""
Class representing a recommendation for remediation in the AWS Security Finding Format.
Attributes:
- Text (str): The text description of the recommendation.
- Url (str): The URL link for additional information related to the recommendation.
This class defines the structure of a recommendation within the AWS Security Finding Format. It includes attributes to specify the text description and URL link for further details regarding the recommendation.
"""
Text: str = ""
Url: str = ""
@validator("Text", pre=True, always=True)
def text_must_not_exceed_512_chars(text):
text_validated = text
if len(text) > 512:
text_validated = text[:509] + "..."
return text_validated
@validator("Url", pre=True, always=True)
def set_default_url_if_empty(url):
default_url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
if url:
default_url = url
return default_url
class Remediation(BaseModel):
"""
Class representing a remediation action in the AWS Security Finding Format.
Attributes:
- Recommendation (Recommendation): An instance of the Recommendation class providing details for remediation.
This class defines the structure of a remediation action within the AWS Security Finding Format. It includes an attribute to specify the recommendation for remediation, which is an instance of the Recommendation class.
"""
Recommendation: Recommendation
class AWSSecurityFindingFormat(BaseModel):
"""
AWSSecurityFindingFormat generates a finding's output in JSON ASFF format: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
Attributes:
- SchemaVersion (str): The version of the ASFF schema being used, default value is "2018-10-08".
- Id (str): The unique identifier of the finding.
- ProductArn (str): The ARN of the product generating the finding.
- RecordState (str): The state of the finding record, default value is "ACTIVE".
- ProductFields (ProductFields): An instance of the ProductFields class representing the product fields of the finding.
- GeneratorId (str): The ID of the generator.
- AwsAccountId (str): The AWS account ID associated with the finding.
- Types (list[str]): A list of types associated with the finding, default value is None.
- FirstObservedAt (str): The timestamp when the finding was first observed.
- UpdatedAt (str): The timestamp when the finding was last updated.
- CreatedAt (str): The timestamp when the finding was created.
- Severity (Severity): An instance of the Severity class representing the severity of the finding.
- Title (str): The title of the finding.
- Description (str): The description of the finding, truncated to 1024 characters if longer.
- Resources (list[Resource]): A list of resources associated with the finding, default value is None.
- Compliance (Compliance): An instance of the Compliance class representing the compliance details of the finding.
- Remediation (Remediation): An instance of the Remediation class providing details for remediation.
This class defines the structure of a finding in the AWS Security Finding Format, including various attributes such as schema version, identifiers, timestamps, severity, title, description, resources, compliance details, and remediation information.
"""
SchemaVersion: str = "2018-10-08"
Id: str
ProductArn: str
RecordState: str = "ACTIVE"
ProductFields: ProductFields
GeneratorId: str
AwsAccountId: str
Types: list[str] = None
FirstObservedAt: str
UpdatedAt: str
CreatedAt: str
Severity: Severity
Title: str
Description: str
Resources: list[Resource] = None
Compliance: Compliance
Remediation: Remediation
@validator("Description", pre=True, always=True)
def description_must_not_exceed_1024_chars(description):
description_validated = description
if len(description) > 1024:
description_validated = description[:1021] + "..."
return description_validated

View File

@@ -2,6 +2,7 @@ from operator import attrgetter
from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.common_models import FindingOutput
from prowler.lib.outputs.utils import unroll_list, unroll_tags
from prowler.lib.utils.utils import outputs_unix_timestamp
@@ -21,6 +22,87 @@ def get_provider_data_mapping(provider) -> dict:
return data
def generate_provider_output(provider, finding, csv_data) -> FindingOutput:
"""
generate_provider_output returns the provider's Finding output model
"""
# TODO: we have to standardize this between the above mapping and the provider.get_output_mapping()
try:
if provider.type == "aws":
# TODO: probably Organization UID is without the account id
csv_data["auth_method"] = f"profile: {csv_data['auth_method']}"
csv_data["resource_name"] = finding.resource_id
csv_data["resource_uid"] = finding.resource_arn
csv_data["region"] = finding.region
elif provider.type == "azure":
# TODO: we should show the authentication method used I think
csv_data["auth_method"] = (
f"{provider.identity.identity_type}: {provider.identity.identity_id}"
)
# Get the first tenant domain ID, just in case
csv_data["account_organization_uid"] = csv_data["account_organization_uid"][
0
]
csv_data["account_uid"] = (
csv_data["account_organization_uid"]
if "Tenant:" in finding.subscription
else provider.identity.subscriptions[finding.subscription]
)
csv_data["account_name"] = finding.subscription
csv_data["resource_name"] = finding.resource_name
csv_data["resource_uid"] = finding.resource_id
csv_data["region"] = finding.location
elif provider.type == "gcp":
csv_data["auth_method"] = f"Principal: {csv_data['auth_method']}"
csv_data["account_uid"] = provider.projects[finding.project_id].id
csv_data["account_name"] = provider.projects[finding.project_id].name
csv_data["account_tags"] = provider.projects[finding.project_id].labels
csv_data["resource_name"] = finding.resource_name
csv_data["resource_uid"] = finding.resource_id
csv_data["region"] = finding.location
if (
provider.projects
and finding.project_id in provider.projects
and getattr(provider.projects[finding.project_id], "organization")
):
csv_data["account_organization_uid"] = provider.projects[
finding.project_id
].organization.id
# TODO: for now is None since we don't retrieve that data
csv_data["account_organization"] = provider.projects[
finding.project_id
].organization.display_name
elif provider.type == "kubernetes":
if provider.identity.context == "In-Cluster":
csv_data["auth_method"] = "in-cluster"
else:
csv_data["auth_method"] = "kubeconfig"
csv_data["resource_name"] = finding.resource_name
csv_data["resource_uid"] = finding.resource_id
csv_data["account_name"] = f"context: {provider.identity.context}"
csv_data["region"] = f"namespace: {finding.namespace}"
# Finding Unique ID
# TODO: move this to a function
# TODO: in Azure, GCP and K8s there are findings without resource_name
csv_data["finding_uid"] = (
f"prowler-{provider.type}-{finding.check_metadata.CheckID}-{csv_data['account_uid']}-{csv_data['region']}-{csv_data['resource_name']}"
)
finding_output = FindingOutput(**csv_data)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
return finding_output
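For reference, the finding UID assembled above concatenates stable identifiers; with made-up AWS values:

provider_type = "aws"
check_id = "ec2_ebs_volume_encryption"
account_uid, region, resource_name = "123456789012", "eu-west-1", "vol-0abc123"
finding_uid = f"prowler-{provider_type}-{check_id}-{account_uid}-{region}-{resource_name}"
assert finding_uid == (
    "prowler-aws-ec2_ebs_volume_encryption-123456789012-eu-west-1-vol-0abc123"
)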
# TODO: add test for outputs_unix_timestamp
def fill_common_finding_data(finding: dict, unix_timestamp: bool) -> dict:
finding_data = {

View File

@@ -0,0 +1,77 @@
from datetime import datetime
from enum import Enum
from typing import Optional, Union
from pydantic import BaseModel
from prowler.config.config import prowler_version
class Status(str, Enum):
PASS = "PASS"
FAIL = "FAIL"
MANUAL = "MANUAL"
class Severity(str, Enum):
critical = "critical"
high = "high"
medium = "medium"
low = "low"
informational = "informational"
class FindingOutput(BaseModel):
"""
FindingOutput generates a finding's output. It can be written to CSV or another format doing the mapping.
This is the base finding output model for every provider.
"""
auth_method: str
timestamp: Union[int, datetime]
account_uid: str
# Optional since depends on permissions
account_name: Optional[str]
# Optional since depends on permissions
account_email: Optional[str]
# Optional since depends on permissions
account_organization_uid: Optional[str]
# Optional since depends on permissions
account_organization_name: Optional[str]
# Optional since depends on permissions
account_tags: Optional[list[str]]
finding_uid: str
provider: str
check_id: str
check_title: str
check_type: str
status: Status
status_extended: str
muted: bool = False
service_name: str
subservice_name: str
severity: Severity
resource_type: str
resource_uid: str
resource_name: str
resource_details: str
resource_tags: str
# Only present for AWS and Azure
partition: Optional[str]
region: str
description: str
risk: str
related_url: str
remediation_recommendation_text: str
remediation_recommendation_url: str
remediation_code_nativeiac: str
remediation_code_terraform: str
remediation_code_cli: str
remediation_code_other: str
compliance: dict
categories: str
depends_on: str
related_to: str
notes: str
prowler_version: str = prowler_version

View File

@@ -1,97 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.aws_well_architected.models import (
AWSWellArchitectedModel,
)
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
class AWSWellArchitected(ComplianceOutput):
"""
This class represents the AWS Well-Architected compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into AWS Well-Architected compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS Well-Architected compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AWSWellArchitectedModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Name=attribute.Name,
Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSWellArchitectedModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Name=attribute.Name,
Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,32 +0,0 @@
from typing import Optional
from pydantic import BaseModel
class AWSWellArchitectedModel(BaseModel):
"""
AWSWellArchitectedModel generates a finding's output in AWS Well-Architected Framework format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Name: str
Requirements_Attributes_WellArchitectedQuestionId: str
Requirements_Attributes_WellArchitectedPracticeId: str
Requirements_Attributes_Section: str
Requirements_Attributes_SubSection: Optional[str]
Requirements_Attributes_LevelOfRisk: str
Requirements_Attributes_AssessmentMethod: str
Requirements_Attributes_Description: str
Requirements_Attributes_ImplementationGuidanceUrl: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
Muted: bool
ResourceName: str

View File

@@ -0,0 +1,60 @@
from csv import DictWriter
from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AWS_Well_Architected
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def write_compliance_row_aws_well_architected_framework(
file_descriptors, finding, compliance, output_options, provider
):
try:
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
compliance_output = compliance_output.lower().replace("-", "_")
csv_header = generate_csv_fields(Check_Output_CSV_AWS_Well_Architected)
csv_writer = DictWriter(
file_descriptors[compliance_output],
fieldnames=csv_header,
delimiter=";",
)
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_AWS_Well_Architected(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=provider.identity.account,
Region=finding.region,
AssessmentDate=outputs_unix_timestamp(
output_options.unix_timestamp, timestamp
),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Name=attribute.Name,
Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_writer.writerow(compliance_row.__dict__)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
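The compliance_output key above is derived from the framework triplet; for example, with values borrowed from the Security Pillar framework (shown only as an illustration):

framework, version, provider = "AWS-Well-Architected-Framework-Security-Pillar", "", "AWS"
compliance_output = framework
if version != "":
    compliance_output += "_" + version
if provider != "":
    compliance_output += "_" + provider
compliance_output = compliance_output.lower().replace("-", "_")
assert compliance_output == "aws_well_architected_framework_security_pillar_aws"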

View File

@@ -2,6 +2,75 @@ from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import orange_color
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.cis_aws import generate_compliance_row_cis_aws
from prowler.lib.outputs.compliance.cis_azure import generate_compliance_row_cis_azure
from prowler.lib.outputs.compliance.cis_gcp import generate_compliance_row_cis_gcp
from prowler.lib.outputs.compliance.cis_kubernetes import (
generate_compliance_row_cis_kubernetes,
)
from prowler.lib.outputs.csv.csv import write_csv
def write_compliance_row_cis(
file_descriptors,
finding,
compliance,
output_options,
provider,
input_compliance_frameworks,
):
try:
compliance_output = (
"cis_" + compliance.Version + "_" + compliance.Provider.lower()
)
# Only with the version of CIS that was selected
if compliance_output in str(input_compliance_frameworks):
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
if compliance.Provider == "AWS":
(compliance_row, csv_header) = generate_compliance_row_cis_aws(
finding,
compliance,
requirement,
attribute,
output_options,
provider,
)
elif compliance.Provider == "Azure":
(compliance_row, csv_header) = (
generate_compliance_row_cis_azure(
finding,
compliance,
requirement,
attribute,
output_options,
)
)
elif compliance.Provider == "GCP":
(compliance_row, csv_header) = generate_compliance_row_cis_gcp(
finding, compliance, requirement, attribute, output_options
)
elif compliance.Provider == "Kubernetes":
(compliance_row, csv_header) = (
generate_compliance_row_cis_kubernetes(
finding,
compliance,
requirement,
attribute,
output_options,
provider,
)
)
write_csv(
file_descriptors[compliance_output], csv_header, compliance_row
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
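The equivalent key for CIS is built inline above; a short sketch with hypothetical attributes:

version, provider = "1.5", "AWS"  # stand-ins for compliance.Version / compliance.Provider
compliance_output = "cis_" + version + "_" + provider.lower()
assert compliance_output == "cis_1.5_aws"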
def get_cis_table(

View File

@@ -1,97 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import AWSCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
class AWSCIS(ComplianceOutput):
"""
This class represents the AWS CIS compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into AWS CIS compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS CIS compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AWSCISModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSCISModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)
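The matching step in transform keys off the requirement ids that a finding declares in its compliance mapping; a data-level sketch with hypothetical ids:

finding_requirements = ["1.12", "1.14"]  # finding.compliance.get(compliance_name, [])
requirement_ids = ["1.10", "1.12", "1.14"]  # ids of compliance.Requirements
matched = [r for r in requirement_ids if r in finding_requirements]
assert matched == ["1.12", "1.14"]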

View File

@@ -1,99 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import AzureCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
class AzureCIS(ComplianceOutput):
"""
This class represents the Azure CIS compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into Azure CIS compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into Azure CIS compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AzureCISModel(
Provider=finding.provider,
Description=compliance.Description,
Subscription=finding.account_name,
Location=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_DefaultValue=attribute.DefaultValue,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AzureCISModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
Subscription="",
Location="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_DefaultValue=attribute.DefaultValue,
Requirements_Attributes_References=attribute.References,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,97 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import GCPCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
class GCPCIS(ComplianceOutput):
"""
This class represents the GCP CIS compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into GCP CIS compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into GCP CIS compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = GCPCISModel(
Provider=finding.provider,
Description=compliance.Description,
ProjectId=finding.account_uid,
Location=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = GCPCISModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
ProjectId="",
Location="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,101 +0,0 @@
from datetime import datetime
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import KubernetesCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
class KubernetesCIS(ComplianceOutput):
"""
This class represents the Kubernetes CIS compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into Kubernetes CIS compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into Kubernetes CIS compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = KubernetesCISModel(
Provider=finding.provider,
Description=compliance.Description,
Context=finding.account_name,
Namespace=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Requirements_Attributes_DefaultValue=attribute.DefaultValue,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = KubernetesCISModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
Context="",
Namespace="",
AssessmentDate=str(datetime.now()),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Requirements_Attributes_DefaultValue=attribute.DefaultValue,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,162 +0,0 @@
from pydantic import BaseModel
class AWSCISModel(BaseModel):
"""
AWSCISModel generates a finding's output in AWS CIS Compliance format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: str
Requirements_Attributes_Profile: str
Requirements_Attributes_AssessmentStatus: str
Requirements_Attributes_Description: str
Requirements_Attributes_RationaleStatement: str
Requirements_Attributes_ImpactStatement: str
Requirements_Attributes_RemediationProcedure: str
Requirements_Attributes_AuditProcedure: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_References: str
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
Muted: bool
class AzureCISModel(BaseModel):
"""
AzureCISModel generates a finding's output in Azure CIS Compliance format.
"""
Provider: str
Description: str
Subscription: str
Location: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: str
Requirements_Attributes_Profile: str
Requirements_Attributes_AssessmentStatus: str
Requirements_Attributes_Description: str
Requirements_Attributes_RationaleStatement: str
Requirements_Attributes_ImpactStatement: str
Requirements_Attributes_RemediationProcedure: str
Requirements_Attributes_AuditProcedure: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_DefaultValue: str
Requirements_Attributes_References: str
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
Muted: bool
class GCPCISModel(BaseModel):
"""
GCPCISModel generates a finding's output in GCP CIS Compliance format.
"""
Provider: str
Description: str
ProjectId: str
Location: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: str
Requirements_Attributes_Profile: str
Requirements_Attributes_AssessmentStatus: str
Requirements_Attributes_Description: str
Requirements_Attributes_RationaleStatement: str
Requirements_Attributes_ImpactStatement: str
Requirements_Attributes_RemediationProcedure: str
Requirements_Attributes_AuditProcedure: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_References: str
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
Muted: bool
class KubernetesCISModel(BaseModel):
"""
KubernetesCISModel generates a finding's output in Kubernetes CIS Compliance format.
"""
Provider: str
Description: str
Context: str
Namespace: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: str
Requirements_Attributes_Profile: str
Requirements_Attributes_AssessmentStatus: str
Requirements_Attributes_Description: str
Requirements_Attributes_RationaleStatement: str
Requirements_Attributes_ImpactStatement: str
Requirements_Attributes_RemediationProcedure: str
Requirements_Attributes_AuditProcedure: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_References: str
Requirements_Attributes_DefaultValue: str
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
Muted: bool
# TODO: Create a parent class with the common CIS fields and have each provider-specific class inherit from it.
# This has not been done yet because the current order of the fields in the output file must be preserved.
# class AWS(CIS):
# """
# AWS CIS Compliance format.
# """
# AccountId: str
# Region: str
# class Azure(CIS):
# """
# Azure CIS Compliance format.
# """
# Subscription: str
# Location: str
# class GCP(CIS):
# """
# GCP CIS Compliance format.
# """
# ProjectId: str
# Location: str
# class Kubernetes(CIS):
# """
# Kubernetes CIS Compliance format.
# """
# Context: str
# Namespace: str
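A quick demonstration of the ordering constraint behind that TODO (a sketch, assuming pydantic v1, which these models are built on): fields declared on a parent model are ordered before the subclass's own fields, so provider-specific columns such as AccountId and Region would move to the end of the generated CSV header instead of staying right after Description.

from pydantic import BaseModel

class CIS(BaseModel):
    Provider: str
    Description: str
    AssessmentDate: str

class AWS(CIS):
    AccountId: str
    Region: str

# Declaration order drives the CSV header, parent fields first:
print(list(AWS.__fields__))
# -> ['Provider', 'Description', 'AssessmentDate', 'AccountId', 'Region']
# whereas the flat AWSCISModel keeps AccountId/Region right after Description.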

View File

@@ -0,0 +1,36 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AWS_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def generate_compliance_row_cis_aws(
finding, compliance, requirement, attribute, output_options, provider
):
compliance_row = Check_Output_CSV_AWS_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=provider.identity.account,
Region=finding.region,
AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_header = generate_csv_fields(Check_Output_CSV_AWS_CIS)
return compliance_row, csv_header

View File

@@ -0,0 +1,37 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AZURE_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def generate_compliance_row_cis_azure(
finding, compliance, requirement, attribute, output_options
):
compliance_row = Check_Output_CSV_AZURE_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
Subscription=finding.subscription,
AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_DefaultValue=attribute.DefaultValue,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
ResourceName=finding.resource_name,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_header = generate_csv_fields(Check_Output_CSV_AZURE_CIS)
return compliance_row, csv_header

View File

@@ -0,0 +1,37 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_GCP_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def generate_compliance_row_cis_gcp(
finding, compliance, requirement, attribute, output_options
):
compliance_row = Check_Output_CSV_GCP_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
ProjectId=finding.project_id,
Location=finding.location.lower(),
AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
ResourceName=finding.resource_name,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_header = generate_csv_fields(Check_Output_CSV_GCP_CIS)
return compliance_row, csv_header

View File

@@ -0,0 +1,37 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_KUBERNETES_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def generate_compliance_row_cis_kubernetes(
finding, compliance, requirement, attribute, output_options, provider
):
compliance_row = Check_Output_CSV_KUBERNETES_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
Context=provider.identity.context,
Namespace=finding.namespace,
AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Requirements_Attributes_DefaultValue=attribute.DefaultValue,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_header = generate_csv_fields(Check_Output_CSV_KUBERNETES_CIS)
return compliance_row, csv_header

View File

@@ -2,16 +2,149 @@ import sys
from prowler.lib.check.models import Check_Report
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.cis.cis import get_cis_table
from prowler.lib.outputs.compliance.ens.ens import get_ens_table
from prowler.lib.outputs.compliance.generic.generic_table import (
from prowler.lib.outputs.compliance.aws_well_architected_framework import (
write_compliance_row_aws_well_architected_framework,
)
from prowler.lib.outputs.compliance.cis import get_cis_table, write_compliance_row_cis
from prowler.lib.outputs.compliance.ens_rd2022_aws import (
get_ens_rd2022_aws_table,
write_compliance_row_ens_rd2022_aws,
)
from prowler.lib.outputs.compliance.generic import (
get_generic_compliance_table,
write_compliance_row_generic,
)
from prowler.lib.outputs.compliance.iso27001_2013_aws import (
write_compliance_row_iso27001_2013_aws,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack import (
get_mitre_attack_table,
write_compliance_row_mitre_attack,
)
def add_manual_controls(
output_options, provider, file_descriptors, input_compliance_frameworks
):
try:
# Add the manual control to the output once, then remove it from the metadata
if "manual_check" in output_options.bulk_checks_metadata:
manual_finding = Check_Report(
output_options.bulk_checks_metadata["manual_check"].json()
)
manual_finding.status = "MANUAL"
manual_finding.status_extended = "Manual check"
manual_finding.resource_id = "manual_check"
manual_finding.resource_name = "Manual check"
manual_finding.region = ""
manual_finding.location = ""
manual_finding.project_id = ""
manual_finding.subscription = ""
manual_finding.namespace = ""
fill_compliance(
output_options,
manual_finding,
provider,
file_descriptors,
input_compliance_frameworks,
)
del output_options.bulk_checks_metadata["manual_check"]
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def get_check_compliance_frameworks_in_input(
check_id, bulk_checks_metadata, input_compliance_frameworks
):
"""get_check_compliance_frameworks_in_input returns a list of Compliance for the given check if the compliance framework is present in the input compliance to execute"""
check_compliances = []
try:
if bulk_checks_metadata and bulk_checks_metadata.get(check_id):
for compliance in bulk_checks_metadata[check_id].Compliance:
compliance_name = ""
if compliance.Version:
compliance_name = (
compliance.Framework.lower()
+ "_"
+ compliance.Version.lower()
+ "_"
+ compliance.Provider.lower()
)
else:
compliance_name = (
compliance.Framework.lower() + "_" + compliance.Provider.lower()
)
if compliance_name.replace("-", "_") in input_compliance_frameworks:
check_compliances.append(compliance)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return check_compliances
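For illustration, two hypothetical compliance entries (values assumed, not taken from this diff) show how the lookup key above is assembled and normalized:

for framework, version, provider in [("CIS", "1.5", "AWS"), ("MITRE-ATTACK", "", "AWS")]:
    if version:
        name = framework.lower() + "_" + version.lower() + "_" + provider.lower()
    else:
        name = framework.lower() + "_" + provider.lower()
    print(name.replace("-", "_"))
# -> cis_1.5_aws
# -> mitre_attack_aws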
def fill_compliance(
output_options, finding, provider, file_descriptors, input_compliance_frameworks
):
try:
# We have to retrieve all the check's compliance requirements and get the ones matching with the input ones
check_compliances = get_check_compliance_frameworks_in_input(
finding.check_metadata.CheckID,
output_options.bulk_checks_metadata,
input_compliance_frameworks,
)
for compliance in check_compliances:
if compliance.Framework == "ENS" and compliance.Version == "RD2022":
write_compliance_row_ens_rd2022_aws(
file_descriptors, finding, compliance, output_options, provider
)
elif compliance.Framework == "CIS":
write_compliance_row_cis(
file_descriptors,
finding,
compliance,
output_options,
provider,
input_compliance_frameworks,
)
elif (
"AWS-Well-Architected-Framework" in compliance.Framework
and compliance.Provider == "AWS"
):
write_compliance_row_aws_well_architected_framework(
file_descriptors, finding, compliance, output_options, provider
)
elif (
compliance.Framework == "ISO27001"
and compliance.Version == "2013"
and compliance.Provider == "AWS"
):
write_compliance_row_iso27001_2013_aws(
file_descriptors, finding, compliance, output_options, provider
)
elif compliance.Framework == "MITRE-ATTACK" and compliance.Version == "":
write_compliance_row_mitre_attack(
file_descriptors, finding, compliance, provider
)
else:
write_compliance_row_generic(
file_descriptors, finding, compliance, output_options, provider
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def display_compliance_table(
findings: list,
bulk_checks_metadata: dict,
@@ -19,24 +152,10 @@ def display_compliance_table(
output_filename: str,
output_directory: str,
compliance_overview: bool,
) -> None:
"""
display_compliance_table generates the compliance table for the given compliance framework.
Args:
findings (list): The list of findings
bulk_checks_metadata (dict): The bulk checks metadata
compliance_framework (str): The compliance framework to generate the table
output_filename (str): The output filename
output_directory (str): The output directory
compliance_overview (bool): Whether to display only the compliance overview
Returns:
None
"""
):
try:
if "ens_" in compliance_framework:
get_ens_table(
if "ens_rd2022_aws" == compliance_framework:
get_ens_rd2022_aws_table(
findings,
bulk_checks_metadata,
compliance_framework,
@@ -78,10 +197,7 @@ def display_compliance_table(
sys.exit(1)
# TODO: this should be in the Check class
def get_check_compliance(
finding: Check_Report, provider_type: str, bulk_checks_metadata: dict
) -> dict:
def get_check_compliance(finding, provider_type, output_options) -> dict:
"""get_check_compliance returns a map with the compliance framework as key and the requirements where the finding's check is present.
Example:
@@ -90,20 +206,12 @@ def get_check_compliance(
"CIS-1.4": ["2.1.3"],
"CIS-1.5": ["2.1.3"],
}
Args:
finding (Any): The Check_Report finding
provider_type (str): The provider type
bulk_checks_metadata (dict): The bulk checks metadata
Returns:
dict: The compliance framework as key and the requirements where the finding's check is present.
"""
try:
check_compliance = {}
# We have to retrieve all the check's compliance requirements
if finding.check_metadata.CheckID in bulk_checks_metadata:
for compliance in bulk_checks_metadata[
if finding.check_metadata.CheckID in output_options.bulk_checks_metadata:
for compliance in output_options.bulk_checks_metadata[
finding.check_metadata.CheckID
].Compliance:
compliance_fw = compliance.Framework

View File

@@ -1,81 +0,0 @@
from csv import DictWriter
from pathlib import Path
from typing import List
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
class ComplianceOutput(Output):
"""
This class is the abstract base class for the compliance-format outputs of findings.
Attributes:
_data (list): A list to store transformed data from findings.
_file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
__init__: Initializes the Output class with findings, optionally creates a file descriptor.
data: Property to access the transformed data.
file_descriptor: Property to access the file descriptor.
transform: Abstract method to transform findings into a specific format.
batch_write_data_to_file: Method to write the transformed data to a CSV file in batches.
create_file_descriptor: Method to create a file descriptor for writing data to a file.
"""
def __init__(
self,
findings: List[Finding],
compliance: ComplianceBaseModel,
create_file_descriptor: bool = False,
file_path: str = None,
file_extension: str = "",
) -> None:
self._data = []
if not file_extension and file_path:
self._file_extension = "".join(Path(file_path).suffixes)
if file_extension:
self._file_extension = file_extension
if findings:
# Get the compliance name of the model
compliance_name = (
compliance.Framework + "-" + compliance.Version
if compliance.Version
else compliance.Framework
)
self.transform(findings, compliance, compliance_name)
if create_file_descriptor:
self.create_file_descriptor(file_path)
def batch_write_data_to_file(self) -> None:
"""
Writes the findings data to a CSV file in the specific compliance format.
Returns:
- None
"""
try:
if (
getattr(self, "_file_descriptor", None)
and not self._file_descriptor.closed
and self._data
):
csv_writer = DictWriter(
self._file_descriptor,
fieldnames=[field.upper() for field in self._data[0].dict().keys()],
delimiter=";",
)
csv_writer.writeheader()
for finding in self._data:
csv_writer.writerow(
{k.upper(): v for k, v in finding.dict().items()}
)
self._file_descriptor.close()
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
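A minimal usage sketch of this base class through one of its concrete subclasses (the import path and file name are illustrative, and `findings` and `cis_compliance` are assumed to be in scope):

from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS  # assumed module path

output = AWSCIS(
    findings=findings,            # list[Finding] produced by a scan
    compliance=cis_compliance,    # a loaded ComplianceBaseModel, e.g. CIS for AWS
    create_file_descriptor=True,
    file_path="output/compliance/example_cis_aws.csv",
)
output.batch_write_data_to_file()  # ';'-delimited CSV with uppercase headers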

View File

@@ -1,103 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.ens.models import AWSENSModel
from prowler.lib.outputs.finding import Finding
class AWSENS(ComplianceOutput):
"""
This class represents the AWS ENS compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into AWS ENS compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS ENS compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AWSENSModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
Requirements_Attributes_Marco=attribute.Marco,
Requirements_Attributes_Categoria=attribute.Categoria,
Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
Requirements_Attributes_Nivel=attribute.Nivel,
Requirements_Attributes_Tipo=attribute.Tipo,
Requirements_Attributes_Dimensiones=",".join(
attribute.Dimensiones
),
Requirements_Attributes_ModoEjecucion=attribute.ModoEjecucion,
Requirements_Attributes_Dependencias=",".join(
attribute.Dependencias
),
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSENSModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
Requirements_Attributes_Marco=attribute.Marco,
Requirements_Attributes_Categoria=attribute.Categoria,
Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
Requirements_Attributes_Nivel=attribute.Nivel,
Requirements_Attributes_Tipo=attribute.Tipo,
Requirements_Attributes_Dimensiones=",".join(
attribute.Dimensiones
),
Requirements_Attributes_ModoEjecucion=attribute.ModoEjecucion,
Requirements_Attributes_Dependencias=",".join(
attribute.Dependencias
),
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,30 +0,0 @@
from pydantic import BaseModel
class AWSENSModel(BaseModel):
"""
AWSENSModel generates a finding's output in CSV ENS format for AWS.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_IdGrupoControl: str
Requirements_Attributes_Marco: str
Requirements_Attributes_Categoria: str
Requirements_Attributes_DescripcionControl: str
Requirements_Attributes_Nivel: str
Requirements_Attributes_Tipo: str
Requirements_Attributes_Dimensiones: str
Requirements_Attributes_ModoEjecucion: str
Requirements_Attributes_Dependencias: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
Muted: bool
ResourceName: str

View File

@@ -1,10 +1,58 @@
from csv import DictWriter
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import orange_color
from prowler.config.config import orange_color, timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_ENS_RD2022
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def get_ens_table(
def write_compliance_row_ens_rd2022_aws(
file_descriptors, finding, compliance, output_options, provider
):
compliance_output = "ens_rd2022_aws"
csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022)
csv_writer = DictWriter(
file_descriptors[compliance_output],
fieldnames=csv_header,
delimiter=";",
)
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_ENS_RD2022(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=provider.identity.account,
Region=finding.region,
AssessmentDate=outputs_unix_timestamp(
output_options.unix_timestamp, timestamp
),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
Requirements_Attributes_Marco=attribute.Marco,
Requirements_Attributes_Categoria=attribute.Categoria,
Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
Requirements_Attributes_Nivel=attribute.Nivel,
Requirements_Attributes_Tipo=attribute.Tipo,
Requirements_Attributes_Dimensiones=",".join(attribute.Dimensiones),
Requirements_Attributes_ModoEjecucion=attribute.ModoEjecucion,
Requirements_Attributes_Dependencias=",".join(attribute.Dependencias),
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_writer.writerow(compliance_row.__dict__)
def get_ens_rd2022_aws_table(
findings: list,
bulk_checks_metadata: dict,
compliance_framework: str,
@@ -30,7 +78,11 @@ def get_ens_table(
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if compliance.Framework == "ENS" and compliance.Provider == "AWS":
if (
compliance.Framework == "ENS"
and compliance.Provider == "AWS"
and compliance.Version == "RD2022"
):
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
marco_categoria = f"{attribute.Marco}/{attribute.Categoria}"

View File

@@ -0,0 +1,105 @@
from csv import DictWriter
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import orange_color, timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_Generic_Compliance
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def write_compliance_row_generic(
file_descriptors, finding, compliance, output_options, provider
):
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
compliance_output = compliance_output.lower().replace("-", "_")
csv_header = generate_csv_fields(Check_Output_CSV_Generic_Compliance)
csv_writer = DictWriter(
file_descriptors[compliance_output],
fieldnames=csv_header,
delimiter=";",
)
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_Generic_Compliance(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=provider.identity.account,
Region=finding.region,
AssessmentDate=outputs_unix_timestamp(
output_options.unix_timestamp, timestamp
),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_SubGroup=attribute.SubGroup,
Requirements_Attributes_Service=attribute.Service,
Requirements_Attributes_Type=attribute.Type,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_writer.writerow(compliance_row.__dict__)
def get_generic_compliance_table(
findings: list,
bulk_checks_metadata: dict,
compliance_framework: str,
output_filename: str,
output_directory: str,
compliance_overview: bool,
):
pass_count = []
fail_count = []
muted_count = []
for index, finding in enumerate(findings):
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if (
compliance.Framework.upper()
in compliance_framework.upper().replace("_", "-")
and compliance.Version in compliance_framework.upper()
and compliance.Provider in compliance_framework.upper()
):
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
if finding.muted:
if index not in muted_count:
muted_count.append(index)
else:
if finding.status == "FAIL" and index not in fail_count:
fail_count.append(index)
elif finding.status == "PASS" and index not in pass_count:
pass_count.append(index)
if (
len(fail_count) + len(pass_count) + len(muted_count) > 1
): # If there are no resources, don't print the compliance table
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
if not compliance_overview:
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(
f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n"
)

View File

@@ -1,87 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.generic.models import GenericComplianceModel
from prowler.lib.outputs.finding import Finding
class GenericCompliance(ComplianceOutput):
"""
This class represents the Generic compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into Generic compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into Generic compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = GenericComplianceModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_SubGroup=attribute.SubGroup,
Requirements_Attributes_Service=attribute.Service,
Requirements_Attributes_Type=attribute.Type,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = GenericComplianceModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_SubGroup=attribute.SubGroup,
Requirements_Attributes_Service=attribute.Service,
Requirements_Attributes_Type=attribute.Type,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,54 +0,0 @@
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import orange_color
def get_generic_compliance_table(
findings: list,
bulk_checks_metadata: dict,
compliance_framework: str,
output_filename: str,
output_directory: str,
compliance_overview: bool,
):
pass_count = []
fail_count = []
muted_count = []
for index, finding in enumerate(findings):
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if (
compliance.Framework.upper()
in compliance_framework.upper().replace("_", "-")
and compliance.Version in compliance_framework.upper()
and compliance.Provider in compliance_framework.upper()
):
if finding.muted:
if index not in muted_count:
muted_count.append(index)
else:
if finding.status == "FAIL" and index not in fail_count:
fail_count.append(index)
elif finding.status == "PASS" and index not in pass_count:
pass_count.append(index)
if (
len(fail_count) + len(pass_count) + len(muted_count) > 1
): # If there are no resources, don't print the compliance table
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
if not compliance_overview:
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(
f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n"
)

View File

@@ -1,28 +0,0 @@
from typing import Optional
from pydantic import BaseModel
class GenericComplianceModel(BaseModel):
"""
GenericComplianceModel generates a finding's output in Generic Compliance format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: Optional[str]
Requirements_Attributes_SubSection: Optional[str]
Requirements_Attributes_SubGroup: Optional[str]
Requirements_Attributes_Service: Optional[str]
Requirements_Attributes_Type: Optional[str]
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
Muted: bool
ResourceName: str

View File

@@ -1,81 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.iso27001.models import AWSISO27001Model
from prowler.lib.outputs.finding import Finding
class AWSISO27001(ComplianceOutput):
"""
This class represents the AWS ISO 27001 compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into AWS ISO 27001 compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS ISO 27001 compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AWSISO27001Model(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Attributes_Category=attribute.Category,
Requirements_Attributes_Objetive_ID=attribute.Objetive_ID,
Requirements_Attributes_Objetive_Name=attribute.Objetive_Name,
Requirements_Attributes_Check_Summary=attribute.Check_Summary,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
CheckId=finding.check_id,
Muted=finding.muted,
ResourceName=finding.resource_name,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSISO27001Model(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Attributes_Category=attribute.Category,
Requirements_Attributes_Objetive_ID=attribute.Objetive_ID,
Requirements_Attributes_Objetive_Name=attribute.Objetive_Name,
Requirements_Attributes_Check_Summary=attribute.Check_Summary,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,23 +0,0 @@
from pydantic import BaseModel
class AWSISO27001Model(BaseModel):
"""
AWSISO27001Model generates a finding's output in CSV AWS ISO27001 format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Attributes_Category: str
Requirements_Attributes_Objetive_ID: str
Requirements_Attributes_Objetive_Name: str
Requirements_Attributes_Check_Summary: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
Muted: bool
ResourceName: str

View File

@@ -0,0 +1,52 @@
from csv import DictWriter
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AWS_ISO27001_2013
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
def write_compliance_row_iso27001_2013_aws(
file_descriptors, finding, compliance, output_options, provider
):
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
compliance_output = compliance_output.lower().replace("-", "_")
csv_header = generate_csv_fields(Check_Output_CSV_AWS_ISO27001_2013)
csv_writer = DictWriter(
file_descriptors[compliance_output],
fieldnames=csv_header,
delimiter=";",
)
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
requirement_name = requirement.Name
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_AWS_ISO27001_2013(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=provider.identity.account,
Region=finding.region,
AssessmentDate=outputs_unix_timestamp(
output_options.unix_timestamp, timestamp
),
Requirements_Id=requirement_id,
Requirements_Name=requirement_name,
Requirements_Description=requirement_description,
Requirements_Attributes_Category=attribute.Category,
Requirements_Attributes_Objetive_ID=attribute.Objetive_ID,
Requirements_Attributes_Objetive_Name=attribute.Objetive_Name,
Requirements_Attributes_Check_Summary=attribute.Check_Summary,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
Muted=finding.muted,
)
csv_writer.writerow(compliance_row.__dict__)

View File

@@ -1,7 +1,101 @@
from csv import DictWriter
from importlib import import_module
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import orange_color
from prowler.config.config import orange_color, timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.outputs.utils import unroll_list
from prowler.lib.utils.utils import outputs_unix_timestamp
def write_compliance_row_mitre_attack(file_descriptors, finding, compliance, provider):
try:
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
mitre_attack_model_name = "MitreAttack" + compliance.Provider
module = import_module("prowler.lib.outputs.compliance.mitre_attack.models")
mitre_attack_model = getattr(module, mitre_attack_model_name)
compliance_output = compliance_output.lower().replace("-", "_")
csv_header = generate_csv_fields(mitre_attack_model)
csv_writer = DictWriter(
file_descriptors[compliance_output],
fieldnames=csv_header,
delimiter=";",
)
for requirement in compliance.Requirements:
if compliance.Provider == "AWS":
attributes_services = ", ".join(
attribute.AWSService for attribute in requirement.Attributes
)
elif compliance.Provider == "Azure":
attributes_services = ", ".join(
attribute.AzureService for attribute in requirement.Attributes
)
elif compliance.Provider == "GCP":
attributes_services = ", ".join(
attribute.GCPService for attribute in requirement.Attributes
)
requirement_description = requirement.Description
requirement_id = requirement.Id
requirement_name = requirement.Name
attributes_categories = ", ".join(
attribute.Category for attribute in requirement.Attributes
)
attributes_values = ", ".join(
attribute.Value for attribute in requirement.Attributes
)
attributes_comments = ", ".join(
attribute.Comment for attribute in requirement.Attributes
)
common_data = {
"Provider": finding.check_metadata.Provider,
"Description": compliance.Description,
"AssessmentDate": outputs_unix_timestamp(
provider.output_options.unix_timestamp, timestamp
),
"Requirements_Id": requirement_id,
"Requirements_Name": requirement_name,
"Requirements_Description": requirement_description,
"Requirements_Tactics": unroll_list(requirement.Tactics),
"Requirements_SubTechniques": unroll_list(requirement.SubTechniques),
"Requirements_Platforms": unroll_list(requirement.Platforms),
"Requirements_TechniqueURL": requirement.TechniqueURL,
"Requirements_Attributes_Services": attributes_services,
"Requirements_Attributes_Categories": attributes_categories,
"Requirements_Attributes_Values": attributes_values,
"Requirements_Attributes_Comments": attributes_comments,
"Status": finding.status,
"StatusExtended": finding.status_extended,
"ResourceId": finding.resource_id,
"CheckId": finding.check_metadata.CheckID,
"Muted": finding.muted,
}
if compliance.Provider == "AWS":
common_data["AccountId"] = provider.identity.account
common_data["Region"] = finding.region
elif compliance.Provider == "Azure":
common_data["SubscriptionId"] = unroll_list(
provider.identity.subscriptions
)
elif compliance.Provider == "GCP":
common_data["ProjectId"] = unroll_list(provider.projects)
compliance_row = mitre_attack_model(**common_data)
csv_writer.writerow(compliance_row.__dict__)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
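The dynamic model lookup above can be sketched in isolation (the provider value is assumed; in the real code it comes from compliance.Provider):

from importlib import import_module

provider_name = "AWS"  # hypothetical
module = import_module("prowler.lib.outputs.compliance.mitre_attack.models")
mitre_attack_model = getattr(module, "MitreAttack" + provider_name)
# resolves to the MitreAttackAWS pydantic model defined in that module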
def get_mitre_attack_table(

View File

@@ -1,115 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AWSMitreAttackModel
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.utils import unroll_list
class AWSMitreAttack(ComplianceOutput):
"""
This class represents the AWS MITRE ATT&CK compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into AWS MITRE ATT&CK compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS MITRE ATT&CK compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
compliance_row = AWSMitreAttackModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Tactics=unroll_list(requirement.Tactics),
Requirements_SubTechniques=unroll_list(
requirement.SubTechniques
),
Requirements_Platforms=unroll_list(requirement.Platforms),
Requirements_TechniqueURL=requirement.TechniqueURL,
Requirements_Attributes_Services=", ".join(
attribute.AWSService for attribute in requirement.Attributes
),
Requirements_Attributes_Categories=", ".join(
attribute.Category for attribute in requirement.Attributes
),
Requirements_Attributes_Values=", ".join(
attribute.Value for attribute in requirement.Attributes
),
Requirements_Attributes_Comments=", ".join(
attribute.Comment for attribute in requirement.Attributes
),
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSMitreAttackModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Tactics=unroll_list(requirement.Tactics),
Requirements_SubTechniques=unroll_list(
requirement.SubTechniques
),
Requirements_Platforms=unroll_list(requirement.Platforms),
Requirements_TechniqueURL=requirement.TechniqueURL,
Requirements_Attributes_Services=", ".join(
attribute.AWSService for attribute in requirement.Attributes
),
Requirements_Attributes_Categories=", ".join(
attribute.Category for attribute in requirement.Attributes
),
Requirements_Attributes_Values=", ".join(
attribute.Value for attribute in requirement.Attributes
),
Requirements_Attributes_Comments=", ".join(
attribute.Comment for attribute in requirement.Attributes
),
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -1,117 +0,0 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AzureMitreAttackModel
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.utils import unroll_list
class AzureMitreAttack(ComplianceOutput):
"""
This class represents the Azure MITRE ATT&CK compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into Azure MITRE ATT&CK compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into Azure MITRE ATT&CK compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
compliance_row = AzureMitreAttackModel(
Provider=finding.provider,
Description=compliance.Description,
SubscriptionId=finding.account_uid,
Location=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Tactics=unroll_list(requirement.Tactics),
Requirements_SubTechniques=unroll_list(
requirement.SubTechniques
),
Requirements_Platforms=unroll_list(requirement.Platforms),
Requirements_TechniqueURL=requirement.TechniqueURL,
Requirements_Attributes_Services=", ".join(
attribute.AzureService
for attribute in requirement.Attributes
),
Requirements_Attributes_Categories=", ".join(
attribute.Category for attribute in requirement.Attributes
),
Requirements_Attributes_Values=", ".join(
attribute.Value for attribute in requirement.Attributes
),
Requirements_Attributes_Comments=", ".join(
attribute.Comment for attribute in requirement.Attributes
),
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AzureMitreAttackModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
SubscriptionId="",
Location="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Tactics=unroll_list(requirement.Tactics),
Requirements_SubTechniques=unroll_list(
requirement.SubTechniques
),
Requirements_Platforms=unroll_list(requirement.Platforms),
Requirements_TechniqueURL=requirement.TechniqueURL,
Requirements_Attributes_Services=", ".join(
attribute.AzureService
for attribute in requirement.Attributes
),
Requirements_Attributes_Categories=", ".join(
attribute.Category for attribute in requirement.Attributes
),
Requirements_Attributes_Values=", ".join(
attribute.Value for attribute in requirement.Attributes
),
Requirements_Attributes_Comments=", ".join(
attribute.Comment for attribute in requirement.Attributes
),
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

Some files were not shown because too many files have changed in this diff.