Compare commits


1 Commit
3.7.0 ... 3.3.3

Author | SHA1 | Message | Date
github-actions | 32f81c7672 | chore(release): 3.3.3 | 2023-04-05 10:59:06 +00:00
846 changed files with 10396 additions and 47861 deletions

View File

@@ -35,8 +35,6 @@ jobs:
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}"
git push -f origin ${{ env.RELEASE_TAG }}
git checkout -B release-${{ env.RELEASE_TAG }}
git push origin release-${{ env.RELEASE_TAG }}
poetry build
- name: Publish prowler package to PyPI
run: |

View File

@@ -61,7 +61,6 @@ repos:
hooks:
- id: poetry-check
- id: poetry-lock
args: ["--no-update"]
- repo: https://github.com/hadolint/hadolint
rev: v2.12.1-beta
@@ -76,15 +75,6 @@ repos:
entry: bash -c 'pylint --disable=W,C,R,E -j 0 -rn -sn prowler/'
language: system
- id: trufflehog
name: TruffleHog
description: Detect secrets in your data.
# entry: bash -c 'trufflehog git file://. --only-verified --fail'
# For running trufflehog in docker, use the following entry instead:
entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["commit", "push"]
- id: pytest-check
name: pytest-check
entry: bash -c 'pytest tests -n auto'

View File

@@ -1,13 +0,0 @@
# Do you want to learn how to...
- Contribute with your code or fixes to Prowler
- Create a new check for a provider
- Create a new security compliance framework
- Add a custom output format
- Add a new integration
- Contribute with documentation
Want some swag as appreciation for your contribution?
# Prowler Developer Guide
https://docs.prowler.cloud/en/latest/tutorials/developer-guide/

View File

@@ -11,10 +11,11 @@
</p>
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypi.org/project/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
<a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg?label=prowler-cloud%20downloads"></a>
<a href="https://formulae.brew.sh/formula/prowler#default"><img alt="Brew Prowler Downloads" src="https://img.shields.io/homebrew/installs/dm/prowler?label=brew%20downloads"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
@@ -35,14 +36,7 @@
`Prowler` is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Schema) and your custom security frameworks.
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.cloud/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.cloud/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 283 | 55 -> `prowler aws --list-services` | 21 -> `prowler aws --list-compliance` | 5 -> `prowler aws --list-categories` |
| GCP | 73 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 0 -> `prowler gcp --list-categories`|
| Azure | 20 | 3 -> `prowler azure --list-services` | CIS soon | 1 -> `prowler azure --list-categories` |
| Kubernetes | Planned | - | - | - |
It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
# 📖 Documentation
@@ -91,11 +85,11 @@ python prowler.py -v
You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.
![Architecture](https://github.com/prowler-cloud/prowler/assets/38561120/080261d9-773d-4af1-af79-217a273e3176)
![Architecture](https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/architecture.png?raw=True)
# 📝 Requirements
Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html#), [Azure SDK](https://azure.github.io/azure-sdk-for-python/) and [GCP API Python Client](https://github.com/googleapis/google-api-python-client/).
Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html#) and [Azure SDK](https://azure.github.io/azure-sdk-for-python/).
## AWS
Since Prowler uses AWS Credentials under the hood, you can follow any authentication method as described [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence).
@@ -122,6 +116,22 @@ Those credentials must be associated to a user or role with proper permissions t
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
## Google Cloud Platform
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account with proper permissions to run all checks. To make sure, add the following roles to the member associated with the credentials (a sample `gcloud` sketch follows below):
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
> `prowler` will scan the project associated with the credentials.
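If you manage IAM bindings with the `gcloud` CLI, a minimal sketch for granting those roles could look like the following; the project ID and member are placeholders, and the role IDs are assumptions inferred from the role names above:
```console
gcloud projects add-iam-policy-binding my-project-id \
    --member="user:auditor@example.com" --role="roles/viewer"
gcloud projects add-iam-policy-binding my-project-id \
    --member="user:auditor@example.com" --role="roles/iam.securityReviewer"
gcloud projects add-iam-policy-binding my-project-id \
    --member="user:auditor@example.com" --role="roles/stackdriver.accounts.viewer"
```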
## Azure
Prowler for Azure supports the following authentication types:
@@ -144,7 +154,7 @@ export AZURE_CLIENT_SECRET="XXXXXXX"
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
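For reference, a minimal sketch of a service-principal run; the variable names follow the standard Azure SDK conventions and the values are placeholders:
```console
export AZURE_CLIENT_ID="XXXXXXX"
export AZURE_TENANT_ID="XXXXXXX"
export AZURE_CLIENT_SECRET="XXXXXXX"
prowler azure --sp-env-auth
```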
### AZ CLI / Browser / Managed Identity authentication
The other three cases do not need additional configuration: `--az-cli-auth` and `--managed-identity-auth` are automated options, while `--browser-auth` needs the user to authenticate using the default browser to start the scan. `--browser-auth` also needs the tenant ID to be specified with `--tenant-id`.
The other three cases do not need additional configuration: `--az-cli-auth` and `--managed-identity-auth` are automated options, while `--browser-auth` needs the user to authenticate using the default browser to start the scan.
### Permissions
@@ -170,22 +180,6 @@ Regarding the subscription scope, Prowler by default scans all the subscriptions
- `Reader`
## Google Cloud Platform
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account with proper permissions to run all checks. To make sure, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
# 💻 Basic Usage
To run Prowler, you will need to specify the provider (e.g. aws or azure):
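For example, a minimal sketch that scans an AWS account with the default options:
```console
prowler aws
```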
@@ -251,6 +245,14 @@ prowler aws --profile custom-profile -f us-east-1 eu-south-2
```
> By default, `prowler` will scan all AWS regions.
## Google Cloud Platform
Optionally, you can provide the location of an application credential JSON file with the following argument:
```console
prowler gcp --credentials-file path
```
## Azure
With Azure you need to specify which auth method is going to be used:
@@ -260,14 +262,12 @@ prowler azure [--sp-env-auth, --az-cli-auth, --browser-auth, --managed-identity-
```
> By default, `prowler` will scan all Azure subscriptions.
## Google Cloud Platform
# 🎉 New Features
Optionally, you can provide the location of an application credential JSON file with the following argument:
```console
prowler gcp --credentials-file path
```
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
- Python: we got rid of all the Bash; it is now all in Python.
- Faster: huge performance improvements (the same account went from 2.5 hours to 4 minutes).
- Developers and community: we have made it easier to contribute with new checks and new compliance frameworks. We also included unit tests.
- Multi-cloud: in addition to AWS, we have added Azure; we plan to include GCP and OCI soon, so let us know if you want to contribute!
# 📃 License

View File

@@ -1,24 +1,45 @@
# Build command
# docker build --platform=linux/amd64 --no-cache -t prowler:latest .
ARG PROWLER_VERSION=latest
FROM public.ecr.aws/amazonlinux/amazonlinux:2022
FROM toniblyx/prowler:${PROWLER_VERSION}
ARG PROWLERVER=2.9.0
ARG USERNAME=prowler
ARG USERID=34000
USER 0
# hadolint ignore=DL3018
RUN apk --no-cache add bash aws-cli jq
# Install Dependencies
RUN \
dnf update -y && \
dnf install -y bash file findutils git jq python3 python3-pip \
python3-setuptools python3-wheel shadow-utils tar unzip which && \
dnf remove -y awscli && \
dnf clean all && \
useradd -l -s /bin/sh -U -u ${USERID} ${USERNAME} && \
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \
unzip awscliv2.zip && \
./aws/install && \
pip3 install --no-cache-dir --upgrade pip && \
pip3 install --no-cache-dir "git+https://github.com/ibm/detect-secrets.git@master#egg=detect-secrets" && \
rm -rf aws awscliv2.zip /var/cache/dnf
ARG MULTI_ACCOUNT_SECURITY_HUB_PATH=/home/prowler/multi-account-securityhub
# Place script and env vars
COPY .awsvariables run-prowler-securityhub.sh /
USER prowler
# Install Prowler and change permissions
RUN \
curl -L "https://github.com/prowler-cloud/prowler/archive/refs/tags/${PROWLERVER}.tar.gz" -o "prowler.tar.gz" && \
tar xvzf prowler.tar.gz && \
rm -f prowler.tar.gz && \
mv prowler-${PROWLERVER} prowler && \
chown ${USERNAME}:${USERNAME} /run-prowler-securityhub.sh && \
chmod 500 /run-prowler-securityhub.sh && \
chown ${USERNAME}:${USERNAME} /.awsvariables && \
chmod 400 /.awsvariables && \
chown ${USERNAME}:${USERNAME} -R /prowler && \
chmod +x /prowler/prowler
# Move script and environment variables
RUN mkdir "${MULTI_ACCOUNT_SECURITY_HUB_PATH}"
COPY --chown=prowler:prowler .awsvariables run-prowler-securityhub.sh "${MULTI_ACCOUNT_SECURITY_HUB_PATH}"/
RUN chmod 500 "${MULTI_ACCOUNT_SECURITY_HUB_PATH}"/run-prowler-securityhub.sh && \
chmod 400 "${MULTI_ACCOUNT_SECURITY_HUB_PATH}"/.awsvariables
# Drop to user
USER ${USERNAME}
WORKDIR ${MULTI_ACCOUNT_SECURITY_HUB_PATH}
ENTRYPOINT ["./run-prowler-securityhub.sh"]
# Run script
ENTRYPOINT ["/run-prowler-securityhub.sh"]

View File

@@ -1,17 +1,20 @@
#!/bin/bash
# Run Prowler against All AWS Accounts in an AWS Organization
# Change Directory (rest of the script, assumes you're in the root directory)
cd / || exit
# Show Prowler Version
prowler -v
./prowler/prowler -V
# Source .awsvariables
# shellcheck disable=SC1091
source .awsvariables
# Get Values from Environment Variables
echo "ROLE: ${ROLE}"
echo "PARALLEL_ACCOUNTS: ${PARALLEL_ACCOUNTS}"
echo "REGION: ${REGION}"
echo "ROLE: $ROLE"
echo "PARALLEL_ACCOUNTS: $PARALLEL_ACCOUNTS"
echo "REGION: $REGION"
# Function to unset AWS Profile Variables
unset_aws() {
@@ -21,33 +24,33 @@ unset_aws
# Find THIS Account AWS Number
CALLER_ARN=$(aws sts get-caller-identity --output text --query "Arn")
PARTITION=$(echo "${CALLER_ARN}" | cut -d: -f2)
THISACCOUNT=$(echo "${CALLER_ARN}" | cut -d: -f5)
echo "THISACCOUNT: ${THISACCOUNT}"
echo "PARTITION: ${PARTITION}"
PARTITION=$(echo "$CALLER_ARN" | cut -d: -f2)
THISACCOUNT=$(echo "$CALLER_ARN" | cut -d: -f5)
echo "THISACCOUNT: $THISACCOUNT"
echo "PARTITION: $PARTITION"
# Function to Assume Role to THIS Account & Create Session
this_account_session() {
unset_aws
role_credentials=$(aws sts assume-role --role-arn arn:"${PARTITION}":iam::"${THISACCOUNT}":role/"${ROLE}" --role-session-name ProwlerRun --output json)
AWS_ACCESS_KEY_ID=$(echo "${role_credentials}" | jq -r .Credentials.AccessKeyId)
AWS_SECRET_ACCESS_KEY=$(echo "${role_credentials}" | jq -r .Credentials.SecretAccessKey)
AWS_SESSION_TOKEN=$(echo "${role_credentials}" | jq -r .Credentials.SessionToken)
role_credentials=$(aws sts assume-role --role-arn arn:"$PARTITION":iam::"$THISACCOUNT":role/"$ROLE" --role-session-name ProwlerRun --output json)
AWS_ACCESS_KEY_ID=$(echo "$role_credentials" | jq -r .Credentials.AccessKeyId)
AWS_SECRET_ACCESS_KEY=$(echo "$role_credentials" | jq -r .Credentials.SecretAccessKey)
AWS_SESSION_TOKEN=$(echo "$role_credentials" | jq -r .Credentials.SessionToken)
export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
}
# Find AWS Master Account
this_account_session
AWSMASTER=$(aws organizations describe-organization --query Organization.MasterAccountId --output text)
echo "AWSMASTER: ${AWSMASTER}"
echo "AWSMASTER: $AWSMASTER"
# Function to Assume Role to Master Account & Create Session
master_account_session() {
unset_aws
role_credentials=$(aws sts assume-role --role-arn arn:"${PARTITION}":iam::"${AWSMASTER}":role/"${ROLE}" --role-session-name ProwlerRun --output json)
AWS_ACCESS_KEY_ID=$(echo "${role_credentials}" | jq -r .Credentials.AccessKeyId)
AWS_SECRET_ACCESS_KEY=$(echo "${role_credentials}" | jq -r .Credentials.SecretAccessKey)
AWS_SESSION_TOKEN=$(echo "${role_credentials}" | jq -r .Credentials.SessionToken)
role_credentials=$(aws sts assume-role --role-arn arn:"$PARTITION":iam::"$AWSMASTER":role/"$ROLE" --role-session-name ProwlerRun --output json)
AWS_ACCESS_KEY_ID=$(echo "$role_credentials" | jq -r .Credentials.AccessKeyId)
AWS_SECRET_ACCESS_KEY=$(echo "$role_credentials" | jq -r .Credentials.SecretAccessKey)
AWS_SESSION_TOKEN=$(echo "$role_credentials" | jq -r .Credentials.SessionToken)
export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
}
@@ -57,20 +60,20 @@ ACCOUNTS_IN_ORGS=$(aws organizations list-accounts --query Accounts[*].Id --outp
# Run Prowler against Accounts in AWS Organization
echo "AWS Accounts in Organization"
echo "${ACCOUNTS_IN_ORGS}"
for accountId in ${ACCOUNTS_IN_ORGS}; do
echo "$ACCOUNTS_IN_ORGS"
for accountId in $ACCOUNTS_IN_ORGS; do
# shellcheck disable=SC2015
test "$(jobs | wc -l)" -ge "${PARALLEL_ACCOUNTS}" && wait -n || true
test "$(jobs | wc -l)" -ge $PARALLEL_ACCOUNTS && wait -n || true
{
START_TIME=${SECONDS}
START_TIME=$SECONDS
# Unset AWS Profile Variables
unset_aws
# Run Prowler
echo -e "Assessing AWS Account: ${accountId}, using Role: ${ROLE} on $(date)"
echo -e "Assessing AWS Account: $accountId, using Role: $ROLE on $(date)"
# Pipe stdout to /dev/null to reduce unnecessary Cloudwatch logs
prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" -q -S -f "${REGION}" > /dev/null
./prowler/prowler -R "$ROLE" -A "$accountId" -M json-asff -q -S -f "$REGION" > /dev/null
TOTAL_SEC=$((SECONDS - START_TIME))
printf "Completed AWS Account: ${accountId} in %02dh:%02dm:%02ds" $((TOTAL_SEC / 3600)) $((TOTAL_SEC % 3600 / 60)) $((TOTAL_SEC % 60))
printf "Completed AWS Account: $accountId in %02dh:%02dm:%02ds" $((TOTAL_SEC / 3600)) $((TOTAL_SEC % 3600 / 60)) $((TOTAL_SEC % 60))
echo ""
} &
done

View File

@@ -1,6 +1,6 @@
# Requirements
Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html#), [Azure SDK](https://azure.github.io/azure-sdk-for-python/) and [GCP API Python Client](https://github.com/googleapis/google-api-python-client/).
Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html#) and [Azure SDK](https://learn.microsoft.com/en-us/python/api/overview/azure/?view=azure-python).
## AWS
Since Prowler uses AWS Credentials under the hood, you can follow any authentication method as described [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence).
@@ -30,12 +30,23 @@ Those credentials must be associated to a user or role with proper permissions t
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
### Multi-Factor Authentication
## Google Cloud
If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to input the following values to get a new session:
### GCP Authentication
- ARN of your MFA device
- TOTP (Time-Based One-Time Password)
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account with proper permissions to run all checks. To make sure, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
> `prowler` will scan the project associated with the credentials.
## Azure
@@ -59,7 +70,7 @@ export AZURE_CLIENT_SECRET="XXXXXXX"
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
### AZ CLI / Browser / Managed Identity authentication
The other three cases do not need additional configuration: `--az-cli-auth` and `--managed-identity-auth` are automated options. To use `--browser-auth` the user needs to authenticate against Azure using the default browser to start the scan; `tenant-id` is also required.
The other three cases do not need additional configuration: `--az-cli-auth` and `--managed-identity-auth` are automated options, while `--browser-auth` needs the user to authenticate using the default browser to start the scan.
### Permissions
@@ -86,21 +97,3 @@ Regarding the subscription scope, Prowler by default scans all the subscriptions
- `Security Reader`
- `Reader`
## Google Cloud
### GCP Authentication
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account with proper permissions to run all checks. To make sure, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.

Binary image file changed but not shown: 283 KiB before, 258 KiB after.

Two image file diffs suppressed because one or more lines are too long (images removed: 11 KiB and 22 KiB before).

View File

@@ -109,7 +109,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
_Requirements_:
* AWS, GCP and/or Azure credentials
* Latest Amazon Linux 2 should come with Python 3.9 already installed; however, it may need pip. Install pip for Python 3.9 with: `sudo yum install -y python3-pip`.
* Latest Amazon Linux 2 should come with Python 3.9 already installed; however, it may need pip. Install pip for Python 3.9 with: `sudo dnf install -y python3-pip`.
* Make sure setuptools for Python is already installed with: `pip3 install setuptools`
_Commands_:
@@ -254,7 +254,13 @@ prowler aws --profile custom-profile -f us-east-1 eu-south-2
```
> By default, `prowler` will scan all AWS regions.
See more details about AWS Authentication in [Requirements](getting-started/requirements.md)
### Google Cloud
Optionally, you can provide the location of an application credential JSON file with the following argument:
```console
prowler gcp --credentials-file path
```
### Azure
@@ -274,31 +280,9 @@ prowler azure --browser-auth
prowler azure --managed-identity-auth
```
See more details about Azure Authentication in [Requirements](getting-started/requirements.md)
More details in [Requirements](getting-started/requirements.md)
By default, Prowler scans all the subscriptions it is allowed to scan; if you want to scan a single subscription or several specific subscriptions, you can use the following flag (using AZ CLI auth as an example):
By default, Prowler scans all the subscriptions it is allowed to scan; if you want to scan a single subscription or several concrete subscriptions, you can use the following flag (using AZ CLI auth as an example):
```console
prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription ID 2> ... <subscription ID N>
```
### Google Cloud
By default, Prowler will use your User Account credentials. You can configure them using:
- `gcloud init` to use a new account
- `gcloud config set account <account>` to use an existing account
Then, obtain your access credentials using: `gcloud auth application-default login`
Otherwise, you can generate and download Service Account keys in JSON format (refer to https://cloud.google.com/iam/docs/creating-managing-service-account-keys) and provide the location of the file with the following argument:
```console
prowler gcp --credentials-file path
```
By default, Prowler scans all the GCP projects it is allowed to scan; if you want to scan a single project or several specific projects, you can use the following flag:
```console
prowler gcp --project-ids <Project ID 1> <Project ID 2> ... <Project ID N>
```
See more details about GCP Authentication in [Requirements](getting-started/requirements.md)

View File

@@ -7,11 +7,9 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b
## Allowlist Yaml File Syntax
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value', which are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
### Account, Check and/or Region can be * to apply for all the cases
### Resources is a list that can have either Regex or Keywords
### Tags is an optional list containing tuples of 'key=value'
########################### ALLOWLIST EXAMPLE ###########################
Allowlist:
Accounts:
@@ -23,19 +21,14 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
- "test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
- "project=test"
"*":
Checks:
@@ -46,7 +39,7 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
- "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"*":
Regions:
- "*"
@@ -55,33 +48,6 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
"*":
Checks:
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
## Supported Allowlist Locations
@@ -116,9 +82,6 @@ prowler aws -w arn:aws:dynamodb:<region_name>:<account_id>:table/<table_name>
- Regions (List): This field contains a list of regions where this allowlist rule is applied (it can also contain an `*` to apply to all scanned regions).
- Resources (List): This field contains a list of regex expressions that apply to the resources to be allowlisted.
- Tags (List): -Optional- This field contains a list of tuples in the form of 'key=value' that apply to the tags of the resources to be allowlisted.
- Exceptions (Map): -Optional- This field contains a map of lists of accounts/regions/resources/tags to be excepted from the allowlist.
The following example will allowlist all resources in all accounts for the EC2 checks in the regions `eu-west-1` and `us-east-1` with the tags `environment=dev` and `environment=prod`, except the resources containing the string `test` in the account `012345678912` and region `eu-west-1` with the tag `environment=prod`:
<img src="../img/allowlist-row.png"/>

View File

@@ -1,31 +0,0 @@
# AWS Authentication
Make sure you have properly configured your AWS CLI with a valid Access Key and Region, or declare the AWS variables properly (or use an instance profile/role):
```console
aws configure
```
or
```console
export AWS_ACCESS_KEY_ID="ASXXXXXXX"
export AWS_SECRET_ACCESS_KEY="XXXXXXXXX"
export AWS_SESSION_TOKEN="XXXXXXXXX"
```
Those credentials must be associated with a user or role with proper permissions to run all checks. To make sure, add the following AWS managed policies to the user or role being used:
- arn:aws:iam::aws:policy/SecurityAudit
- arn:aws:iam::aws:policy/job-function/ViewOnlyAccess
> Moreover, some additional read-only permissions are needed for several checks, so make sure you also attach the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
## Multi-Factor Authentication
If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to input the following values to get a new session:
- ARN of your MFA device
- TOTP (Time-Based One-Time Password)

View File

@@ -1,81 +0,0 @@
# AWS Regions and Partitions
By default Prowler is able to scan the following AWS partitions:
- Commercial: `aws`
- China: `aws-cn`
- GovCloud (US): `aws-us-gov`
> To check the available regions for each partition and service please refer to the following document [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json)
Note that to scan the China (`aws-cn`) or GovCloud (`aws-us-gov`) partitions you must either have a valid region for that partition in your AWS credentials or specify the regions you want to audit for that partition using the `-f/--region` flag.
> Please, refer to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials for more information about the AWS credentials configuration.
You can get more information about the available partitions and regions in the following [Botocore](https://github.com/boto/botocore) [file](https://github.com/boto/botocore/blob/22a19ea7c4c2c4dd7df4ab8c32733cba0c7597a4/botocore/data/partitions.json).
## AWS China
To scan your AWS account in the China partition (`aws-cn`):
- Using the `-f/--region` flag:
```
prowler aws --region cn-north-1 cn-northwest-1
```
- Using the region configured in your AWS profile at `~/.aws/credentials` or `~/.aws/config`:
```
[default]
aws_access_key_id = XXXXXXXXXXXXXXXXXXX
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
region = cn-north-1
```
> With this option all the partition regions will be scanned without the need to use the `-f/--region` flag
## AWS GovCloud (US)
To scan your AWS account in the GovCloud (US) partition (`aws-us-gov`):
- Using the `-f/--region` flag:
```
prowler aws --region us-gov-east-1 us-gov-west-1
```
- Using the region configured in your AWS profile at `~/.aws/credentials` or `~/.aws/config`:
```
[default]
aws_access_key_id = XXXXXXXXXXXXXXXXXXX
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
region = us-gov-east-1
```
> With this option all the partition regions will be scanned without the need to use the `-f/--region` flag
## AWS ISO (US & Europe)
The AWS ISO partitions, known as "secret partitions", are air-gapped from the Internet, so there is no built-in way to scan them. If you want to audit an AWS account in one of the AWS ISO partitions you should manually update [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json) and include the partition, region and services, e.g.:
```json
"iam": {
"regions": {
"aws": [
"eu-west-1",
"us-east-1",
],
"aws-cn": [
"cn-north-1",
"cn-northwest-1"
],
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
],
"aws-iso": [
"aws-iso-global",
"us-iso-east-1",
"us-iso-west-1"
],
"aws-iso-b": [
"aws-iso-b-global",
"us-isob-east-1"
],
"aws-iso-e": [],
}
},
```

View File

@@ -5,7 +5,7 @@ Prowler uses the AWS SDK (Boto3) underneath so it uses the same authentication m
However, there are a few ways to run Prowler against multiple accounts using the IAM Assume Role feature, depending on each use case:
1. You can just set up your custom profile inside `~/.aws/config` with all the needed information about the role to assume, then call it with `prowler aws -p/--profile your-custom-profile`.
- An example profile that performs role-chaining is given below. The `credential_source` can either be set to `Environment`, `Ec2InstanceMetadata`, or `EcsContainer`.
- An example profile that performs role-chaining is given below. The `credential_source` can either be set to `Environment`, `Ec2InstanceMetadata`, or `EcsContainer`.
- Alternatively, you could use `source_profile` instead of `credential_source` to specify a separate named profile that contains IAM user credentials with permission to assume the target role. More information can be found [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html).
```
[profile crossaccountrole]
@@ -23,13 +23,6 @@ prowler aws -R arn:aws:iam::<account_id>:role/<role_name>
prowler aws -T/--session-duration <seconds> -I/--external-id <external_id> -R arn:aws:iam::<account_id>:role/<role_name>
```
## Role MFA
If your IAM Role has MFA configured you can use `--mfa` along with `-R`/`--role <role_arn>` and Prowler will ask you to input the following values to get a new temporary session for the IAM Role provided:
- ARN of your MFA device
- TOTP (Time-Based One-Time Password)
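Putting both flags together, a sketch of such a run (the role ARN is a placeholder):
```console
prowler aws --mfa -R arn:aws:iam::<account_id>:role/<role_name>
```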
## Create Role
To create a role to be assumed in one or multiple accounts you can use either as CloudFormation Stack or StackSet the following [template](https://github.com/prowler-cloud/prowler/blob/master/permissions/create_role_to_assume_cfn.yaml) and adapt it.

View File

@@ -29,34 +29,14 @@ prowler -S -f eu-west-1
> **Note 1**: It is recommended to send only failed findings to Security Hub, which is possible by adding `-q` to the command.
> **Note 2**: Since Prowler performs checks in all regions by default, you may need to filter by region when running the Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings to the hubs in all regions). Prowler will send findings to Security Hub in the region where the scanned resource is located.
> **Note 2**: Since Prowler performs checks in all regions by default, you may need to filter by region when running the Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings to the hubs in all regions).
> **Note 3**: To have updated findings in Security Hub you have to run Prowler periodically, once a day or every few hours.
> **Note 3** to have updated findings in Security Hub you have to run Prowler periodically, once a day or every few hours.
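One way to run it periodically is a scheduled job; below is an illustrative crontab sketch (the schedule and region are assumptions) that sends only failed findings twice a day:
```console
0 */12 * * * prowler aws -S -q -f eu-west-1
```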
Once you run Prowler for the first time you will be able to see its findings in the Findings section:
![Screenshot 2020-10-29 at 10 29 05 PM](https://user-images.githubusercontent.com/3985464/97634676-66c9f600-1a36-11eb-9341-70feb06f6331.png)
## Send findings to Security Hub assuming an IAM Role
When you are auditing a multi-account AWS environment, you can send findings to a Security Hub of another account by assuming an IAM role from that account using the `-R` flag in the Prowler command:
```sh
prowler -S -R arn:aws:iam::123456789012:role/ProwlerExecRole
```
> Remember that the used role needs to have permissions to send findings to Security Hub. To get more information about the permissions required, please refer to the following IAM policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
## Send only failed findings to Security Hub
When using Security Hub it is recommended to send only the failed findings generated. To follow that recommendation you could add the `-q` flag to the Prowler command:
```sh
prowler -S -q
```
## Skip sending updates of findings to Security Hub
By default, Prowler archives all its findings in Security Hub that have not appeared in the last scan.

View File

@@ -18,7 +18,7 @@ prowler azure --sp-env-auth
prowler azure --az-cli-auth
# To use browser authentication
prowler azure --browser-auth --tenant-id "XXXXXXXX"
prowler azure --browser-auth
# To use managed identity auth
prowler azure --managed-identity-auth

View File

@@ -13,7 +13,6 @@ Currently, the available frameworks are:
- `ens_rd2022_aws`
- `aws_audit_manager_control_tower_guardrails_aws`
- `aws_foundational_security_best_practices_aws`
- `aws_well_architected_framework_security_pillar_aws`
- `cisa_aws`
- `fedramp_low_revision_4_aws`
- `fedramp_moderate_revision_4_aws`
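Any framework from the list can be executed by name with the `--compliance` flag; for example, a sketch using the ENS framework above:
```console
prowler aws --compliance ens_rd2022_aws
```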

View File

@@ -1,29 +0,0 @@
# GCP authentication
By default, Prowler will use your User Account credentials. You can configure them using:
- `gcloud init` to use a new account
- `gcloud config set account <account>` to use an existing account
Then, obtain your access credentials using: `gcloud auth application-default login`
Otherwise, you can generate and download Service Account keys in JSON format (refer to https://cloud.google.com/iam/docs/creating-managing-service-account-keys) and provide the location of the file with the following argument:
```console
prowler gcp --credentials-file path
```
> `prowler` will scan the GCP project associated with the credentials.
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account with proper permissions to run all checks. To make sure, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer

Six binary image files not shown: one changed (94 KiB before, 51 KiB after) and five removed (61 KiB, 67 KiB, 200 KiB, 456 KiB and 69 KiB before).

View File

@@ -1,36 +0,0 @@
# Integrations
## Slack
Prowler can be integrated with [Slack](https://slack.com/) to send a summary of the execution, once a Slack App has been configured in your channel, with the following command:
```sh
prowler <provider> --slack
```
![Prowler Slack Message](img/slack-prowler-message.png)
> Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables.
### Configuration
To configure the Slack integration, follow these steps:
1. Create a Slack Application:
- Go to [Slack API page](https://api.slack.com/tutorials/tracks/getting-a-token), scroll down to the *Create app* button and select your workspace:
![Create Slack App](img/create-slack-app.png)
- Install the application in your selected workspaces:
![Install Slack App in Workspace](img/install-in-slack-workspace.png)
- Get the *Slack App OAuth Token* that Prowler needs to send the message:
![Slack App OAuth Token](img/slack-app-token.png)
2. Optionally, create a Slack Channel (you can use an existing one)
3. Integrate the created Slack App to your Slack channel:
- Click on the channel, go to the Integrations tab, and Add an App.
![Slack App Channel Integration](img/integrate-slack-app.png)
4. Set the following environment variables that Prowler will read:
- `SLACK_API_TOKEN`: the *Slack App OAuth Token* that was previously obtained.
- `SLACK_CHANNEL_ID`: the name of your Slack Channel where Prowler will send the message.
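Putting the steps together, a minimal sketch of a run with the Slack integration (the token and channel values are placeholders):
```console
export SLACK_API_TOKEN="xoxb-XXXXXXX"
export SLACK_CHANNEL_ID="prowler-findings"
prowler aws --slack
```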

View File

@@ -51,26 +51,6 @@ prowler <provider> -e/--excluded-checks ec2 rds
```console
prowler <provider> -C/--checks-file <checks_list>.json
```
## Custom Checks
Prowler allows you to include your custom checks with the flag:
```console
prowler <provider> -x/--checks-folder <custom_checks_folder>
```
> S3 URIs are also supported as folders for custom checks, e.g. s3://bucket/prefix/checks_folder/. Make sure that the used credentials have s3:GetObject permissions in the S3 path where the custom checks are located.
The custom checks folder must contain one subfolder per check; each subfolder must be named after the check and must contain (see the layout sketch below):
- An empty `__init__.py`: to make Python treat this check folder as a package.
- A `check_name.py` containing the check's logic.
- A `check_name.metadata.json` containing the check's metadata.
> The check name must start with the service name followed by an underscore (e.g., ec2_instance_public_ip).
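As a layout sketch, using the `ec2_instance_public_ip` example above, the folder passed to `-x/--checks-folder` would look like:
```console
<custom_checks_folder>/
└── ec2_instance_public_ip/
    ├── __init__.py
    ├── ec2_instance_public_ip.py
    └── ec2_instance_public_ip.metadata.json
```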
To see more information about how to write checks see the [Developer Guide](../developer-guide/#create-a-new-check-for-a-provider).
> If you want to run ONLY your custom check(s), import it with -x (--checks-folder) and then run it with -c (--checks), e.g.:
```console
prowler aws -x s3://bucket/prowler/providers/aws/services/s3/s3_bucket_policy/ -c s3_bucket_policy
```
## Severities
Each of Prowler's checks has a severity, which can be:

View File

@@ -11,7 +11,7 @@ The checks that currently have this functionality are:
- autoscaling_find_secrets_ec2_launch_configuration
- awslambda_function_no_secrets_in_code
- awslambda_function_no_secrets_in_variables
- cloudformation_stack_outputs_find_secrets
- cloudformation_outputs_find_secrets
- ec2_instance_secrets_user_data
- ecs_task_definitions_no_environment_secrets
- ssm_document_secrets
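Any of these can be run individually with the `-c/--checks` flag; for example, a sketch scanning only EC2 user data for secrets:
```console
prowler aws -c ec2_instance_secrets_user_data
```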

View File

@@ -16,5 +16,4 @@ prowler <provider> -i
![Quick Inventory Example](../img/quick-inventory.jpg)
## Objections
The inventorying process is done with `resourcegroupstaggingapi` calls, which means that only resources that have or have had tags will appear (except for the IAM and S3 resources, which are retrieved with Boto3 API calls).
> The inventorying process is done with `resourcegroupstaggingapi` calls (except for the IAM resources, which are retrieved with Boto3 API calls).

View File

@@ -1,9 +1,9 @@
# Reporting
By default, Prowler will generate a CSV, JSON, JSON-OCSF and an HTML report; however, you could generate a JSON-ASFF report (used by AWS Security Hub) with `-M` or `--output-modes`:
By default, Prowler will generate a CSV, JSON and an HTML report; however, you could generate a JSON-ASFF report (used by AWS Security Hub) with `-M` or `--output-modes`:
```console
prowler <provider> -M csv json json-ocsf json-asff html
prowler <provider> -M csv json json-asff html
```
## Custom Output Flags
@@ -25,19 +25,13 @@ prowler <provider> -M csv json json-asff html -F <custom_report_name> -o <custom
```
## Send report to AWS S3 Bucket
To save your report in an S3 bucket, use `-B`/`--output-bucket`.
```sh
prowler <provider> -B my-bucket/folder/
```
By default Prowler sends the HTML, JSON and CSV output formats; if you want to send a custom output format, or a single one of the defaults, you can specify it with the `-M` flag.
To save your report in an S3 bucket, use `-B`/`--output-bucket` to define a custom output bucket along with `-M` to define the output format that is going to be uploaded to S3:
```sh
prowler <provider> -M csv -B my-bucket/folder/
```
> In the case you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`.
> In the case you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket.
> Make sure that the used credentials have s3:PutObject permissions in the S3 path where the reports are going to be uploaded.
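For the no-assume case, a sketch that uploads only the CSV report with the initial credentials (the bucket path is a placeholder):
```console
prowler aws -M csv -D my-bucket/folder/
```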
@@ -47,7 +41,6 @@ Prowler supports natively the following output formats:
- CSV
- JSON
- JSON-OCSF
- JSON-ASFF
- HTML
@@ -154,265 +147,6 @@ Hereunder is the structure for each of the supported report formats by Prowler:
> NOTE: Each finding is a `json` object.
### JSON-OCSF
Based on [Open Cybersecurity Schema Framework Security Finding v1.0.0-rc.3](https://schema.ocsf.io/1.0.0-rc.3/classes/security_finding?extensions=)
```
[{
"finding": {
"title": "Check if ACM Certificates are about to expire in specific days or less",
"desc": "Check if ACM Certificates are about to expire in specific days or less",
"supporting_data": {
"Risk": "Expired certificates can impact service availability.",
"Notes": ""
},
"remediation": {
"kb_articles": [
"https://docs.aws.amazon.com/config/latest/developerguide/acm-certificate-expiration-check.html"
],
"desc": "Monitor certificate expiration and take automated action to renew; replace or remove. Having shorter TTL for any security artifact is a general recommendation; but requires additional automation in place. If not longer required delete certificate. Use AWS config using the managed rule: acm-certificate-expiration-check."
},
"types": [
"Data Protection"
],
"src_url": "https://docs.aws.amazon.com/config/latest/developerguide/acm-certificate-expiration-check.html",
"uid": "prowler-aws-acm_certificates_expiration_check-012345678912-eu-west-1-*.xxxxxxxxxxxxxx",
"related_events": []
},
"resources": [
{
"group": {
"name": "acm"
},
"region": "eu-west-1",
"name": "xxxxxxxxxxxxxx",
"uid": "arn:aws:acm:eu-west-1:012345678912:certificate/xxxxxxxxxxxxxx",
"labels": [
{
"Key": "project",
"Value": "prowler-pro"
},
{
"Key": "environment",
"Value": "dev"
},
{
"Key": "terraform",
"Value": "true"
},
{
"Key": "terraform_state",
"Value": "aws"
}
],
"type": "AwsCertificateManagerCertificate",
"details": ""
}
],
"status_detail": "ACM Certificate for xxxxxxxxxxxxxx expires in 111 days.",
"compliance": {
"status": "Success",
"requirements": [
"CISA: ['your-data-2']",
"SOC2: ['cc_6_7']",
"MITRE-ATTACK: ['T1040']",
"GDPR: ['article_32']",
"HIPAA: ['164_308_a_4_ii_a', '164_312_e_1']",
"AWS-Well-Architected-Framework-Security-Pillar: ['SEC09-BP01']",
"NIST-800-171-Revision-2: ['3_13_1', '3_13_2', '3_13_8', '3_13_11']",
"NIST-800-53-Revision-4: ['ac_4', 'ac_17_2', 'sc_12']",
"NIST-800-53-Revision-5: ['sc_7_12', 'sc_7_16']",
"NIST-CSF-1.1: ['ac_5', 'ds_2']",
"RBI-Cyber-Security-Framework: ['annex_i_1_3']",
"FFIEC: ['d3-pc-im-b-1']",
"FedRamp-Moderate-Revision-4: ['ac-4', 'ac-17-2', 'sc-12']",
"FedRAMP-Low-Revision-4: ['ac-17', 'sc-12']"
],
"status_detail": "ACM Certificate for xxxxxxxxxxxxxx expires in 111 days."
},
"message": "ACM Certificate for xxxxxxxxxxxxxx expires in 111 days.",
"severity_id": 4,
"severity": "High",
"cloud": {
"account": {
"name": "",
"uid": "012345678912"
},
"region": "eu-west-1",
"org": {
"uid": "",
"name": ""
},
"provider": "aws",
"project_uid": ""
},
"time": "2023-06-30 10:28:55.297615",
"metadata": {
"original_time": "2023-06-30T10:28:55.297615",
"profiles": [
"dev"
],
"product": {
"language": "en",
"name": "Prowler",
"version": "3.6.1",
"vendor_name": "Prowler/ProwlerPro",
"feature": {
"name": "acm_certificates_expiration_check",
"uid": "acm_certificates_expiration_check",
"version": "3.6.1"
}
},
"version": "1.0.0-rc.3"
},
"state_id": 0,
"state": "New",
"status_id": 1,
"status": "Success",
"type_uid": 200101,
"type_name": "Security Finding: Create",
"impact_id": 0,
"impact": "Unknown",
"confidence_id": 0,
"confidence": "Unknown",
"activity_id": 1,
"activity_name": "Create",
"category_uid": 2,
"category_name": "Findings",
"class_uid": 2001,
"class_name": "Security Finding"
},{
"finding": {
"title": "Check if ACM Certificates are about to expire in specific days or less",
"desc": "Check if ACM Certificates are about to expire in specific days or less",
"supporting_data": {
"Risk": "Expired certificates can impact service availability.",
"Notes": ""
},
"remediation": {
"kb_articles": [
"https://docs.aws.amazon.com/config/latest/developerguide/acm-certificate-expiration-check.html"
],
"desc": "Monitor certificate expiration and take automated action to renew; replace or remove. Having shorter TTL for any security artifact is a general recommendation; but requires additional automation in place. If not longer required delete certificate. Use AWS config using the managed rule: acm-certificate-expiration-check."
},
"types": [
"Data Protection"
],
"src_url": "https://docs.aws.amazon.com/config/latest/developerguide/acm-certificate-expiration-check.html",
"uid": "prowler-aws-acm_certificates_expiration_check-012345678912-eu-west-1-xxxxxxxxxxxxx",
"related_events": []
},
"resources": [
{
"group": {
"name": "acm"
},
"region": "eu-west-1",
"name": "xxxxxxxxxxxxx",
"uid": "arn:aws:acm:eu-west-1:012345678912:certificate/3ea965a0-368d-4d13-95eb-5042a994edc4",
"labels": [
{
"Key": "name",
"Value": "prowler-pro-saas-dev-acm-internal-wildcard"
},
{
"Key": "project",
"Value": "prowler-pro-saas"
},
{
"Key": "environment",
"Value": "dev"
},
{
"Key": "terraform",
"Value": "true"
},
{
"Key": "terraform_state",
"Value": "aws/saas/base"
}
],
"type": "AwsCertificateManagerCertificate",
"details": ""
}
],
"status_detail": "ACM Certificate for xxxxxxxxxxxxx expires in 119 days.",
"compliance": {
"status": "Success",
"requirements": [
"CISA: ['your-data-2']",
"SOC2: ['cc_6_7']",
"MITRE-ATTACK: ['T1040']",
"GDPR: ['article_32']",
"HIPAA: ['164_308_a_4_ii_a', '164_312_e_1']",
"AWS-Well-Architected-Framework-Security-Pillar: ['SEC09-BP01']",
"NIST-800-171-Revision-2: ['3_13_1', '3_13_2', '3_13_8', '3_13_11']",
"NIST-800-53-Revision-4: ['ac_4', 'ac_17_2', 'sc_12']",
"NIST-800-53-Revision-5: ['sc_7_12', 'sc_7_16']",
"NIST-CSF-1.1: ['ac_5', 'ds_2']",
"RBI-Cyber-Security-Framework: ['annex_i_1_3']",
"FFIEC: ['d3-pc-im-b-1']",
"FedRamp-Moderate-Revision-4: ['ac-4', 'ac-17-2', 'sc-12']",
"FedRAMP-Low-Revision-4: ['ac-17', 'sc-12']"
],
"status_detail": "ACM Certificate for xxxxxxxxxxxxx expires in 119 days."
},
"message": "ACM Certificate for xxxxxxxxxxxxx expires in 119 days.",
"severity_id": 4,
"severity": "High",
"cloud": {
"account": {
"name": "",
"uid": "012345678912"
},
"region": "eu-west-1",
"org": {
"uid": "",
"name": ""
},
"provider": "aws",
"project_uid": ""
},
"time": "2023-06-30 10:28:55.297615",
"metadata": {
"original_time": "2023-06-30T10:28:55.297615",
"profiles": [
"dev"
],
"product": {
"language": "en",
"name": "Prowler",
"version": "3.6.1",
"vendor_name": "Prowler/ProwlerPro",
"feature": {
"name": "acm_certificates_expiration_check",
"uid": "acm_certificates_expiration_check",
"version": "3.6.1"
}
},
"version": "1.0.0-rc.3"
},
"state_id": 0,
"state": "New",
"status_id": 1,
"status": "Success",
"type_uid": 200101,
"type_name": "Security Finding: Create",
"impact_id": 0,
"impact": "Unknown",
"confidence_id": 0,
"confidence": "Unknown",
"activity_id": 1,
"activity_name": "Create",
"category_uid": 2,
"category_name": "Findings",
"class_uid": 2001,
"class_name": "Security Finding"
}]
```
> NOTE: Each finding is a `json` object.
### JSON-ASFF
```

View File

@@ -9,7 +9,7 @@ theme:
language: en
logo: img/prowler-logo.png
name: material
favicon: img/prowler-icon.svg
favicon: img/ProwlerPro-icon.svg
features:
- navigation.tabs
- navigation.tabs.sticky
@@ -33,18 +33,15 @@ nav:
- Reporting: tutorials/reporting.md
- Compliance: tutorials/compliance.md
- Quick Inventory: tutorials/quick-inventory.md
- Integrations: tutorials/integrations.md
- Configuration File: tutorials/configuration_file.md
- Logging: tutorials/logging.md
- Allowlist: tutorials/allowlist.md
- Pentesting: tutorials/pentesting.md
- Developer Guide: tutorials/developer-guide.md
- AWS:
- Authentication: tutorials/aws/authentication.md
- Assume Role: tutorials/aws/role-assumption.md
- AWS Security Hub: tutorials/aws/securityhub.md
- AWS Organizations: tutorials/aws/organizations.md
- AWS Regions and Partitions: tutorials/aws/regions-and-partitions.md
- Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
- AWS CloudShell: tutorials/aws/cloudshell.md
- Checks v2 to v3 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
@@ -54,8 +51,6 @@ nav:
- Azure:
- Authentication: tutorials/azure/authentication.md
- Subscriptions: tutorials/azure/subscriptions.md
- Google Cloud:
- Authentication: tutorials/gcp/authentication.md
- Developer Guide: tutorials/developer-guide.md
- Security: security.md
- Contact Us: contact.md

View File

@@ -6,34 +6,28 @@
"account:Get*",
"appstream:Describe*",
"appstream:List*",
"backup:List*",
"cloudtrail:GetInsightSelectors",
"codeartifact:List*",
"codebuild:BatchGet*",
"drs:Describe*",
"ds:Get*",
"ds:Describe*",
"ds:Get*",
"ds:List*",
"ec2:GetEbsEncryptionByDefault",
"ecr:Describe*",
"ecr:GetRegistryScanningConfiguration",
"elasticfilesystem:DescribeBackupPolicy",
"glue:GetConnections",
"glue:GetSecurityConfiguration*",
"glue:SearchTables",
"lambda:GetFunction*",
"logs:FilterLogEvents",
"macie2:GetMacieSession",
"s3:GetAccountPublicAccessBlock",
"shield:DescribeProtection",
"shield:GetSubscriptionState",
"securityhub:BatchImportFindings",
"securityhub:GetFindings",
"ssm:GetDocument",
"ssm-incidents:List*",
"support:Describe*",
"tag:GetTagKeys",
"wellarchitected:List*"
"organizations:DescribeOrganization",
"organizations:ListPolicies*",
"organizations:DescribePolicy"
],
"Resource": "*",
"Effect": "Allow",
@@ -45,8 +39,7 @@
"apigateway:GET"
],
"Resource": [
"arn:aws:apigateway:*::/restapis/*",
"arn:aws:apigateway:*::/apis/*"
"arn:aws:apigateway:*::/restapis/*"
]
}
]

poetry.lock (generated, 920 lines): file diff suppressed because it is too large.

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
from prowler.lib.banner import print_banner
@@ -13,13 +12,11 @@ from prowler.lib.check.check import (
execute_checks,
list_categories,
list_services,
parse_checks_from_folder,
print_categories,
print_checks,
print_compliance_frameworks,
print_compliance_requirements,
print_services,
remove_custom_checks_module,
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
@@ -29,7 +26,6 @@ from prowler.lib.outputs.compliance import display_compliance_table
from prowler.lib.outputs.html import add_html_footer, fill_html_overview_statistics
from prowler.lib.outputs.json import close_json
from prowler.lib.outputs.outputs import extract_findings_statistics, send_to_s3_bucket
from prowler.lib.outputs.slack import send_slack_message
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.security_hub.security_hub import (
resolve_security_hub_previous_findings,
@@ -56,13 +52,9 @@ def prowler():
services = args.services
categories = args.categories
checks_file = args.checks_file
checks_folder = args.checks_folder
severities = args.severity
compliance_framework = args.compliance
if not args.no_banner:
print_banner(args)
# We treat the compliance framework as another output format
if compliance_framework:
args.output_modes.extend(compliance_framework)
@@ -70,6 +62,9 @@ def prowler():
# Set Logger configuration
set_logging_config(args.log_level, args.log_file, args.only_logs)
if not args.no_banner:
print_banner(args)
if args.list_services:
print_services(list_services(provider))
sys.exit()
@@ -79,7 +74,7 @@ def prowler():
bulk_checks_metadata = bulk_load_checks_metadata(provider)
if args.list_categories:
print_categories(list_categories(bulk_checks_metadata))
print_categories(list_categories(provider, bulk_checks_metadata))
sys.exit()
bulk_compliance_frameworks = {}
@@ -113,18 +108,6 @@ def prowler():
provider,
)
# If -l/--list-checks passed as argument, print checks to execute and quit
if args.list_checks:
print_checks(provider, sorted(checks_to_execute), bulk_checks_metadata)
sys.exit()
# Set the audit info based on the selected provider
audit_info = set_provider_audit_info(provider, args.__dict__)
# Import custom checks from folder
if checks_folder:
parse_checks_from_folder(audit_info, checks_folder, provider)
# Exclude checks if -e/--excluded-checks
if excluded_checks:
checks_to_execute = exclude_checks_to_run(checks_to_execute, excluded_checks)
@@ -135,14 +118,22 @@ def prowler():
checks_to_execute, excluded_services, provider
)
# Sort final check list
checks_to_execute = sorted(checks_to_execute)
# If -l/--list-checks passed as argument, print checks to execute and quit
if args.list_checks:
print_checks(provider, checks_to_execute, bulk_checks_metadata)
sys.exit()
# Set the audit info based on the selected provider
audit_info = set_provider_audit_info(provider, args.__dict__)
# Once the audit_info is set and we have the eventual checks based on the resource identifier,
# it is time to check what Prowler's checks are going to be executed
if audit_info.audit_resources:
checks_to_execute = set_provider_execution_parameters(provider, audit_info)
# Sort final check list
checks_to_execute = sorted(checks_to_execute)
# Parse Allowlist
allowlist_file = set_provider_allowlist(provider, audit_info, args)
@@ -153,7 +144,7 @@ def prowler():
# Run the quick inventory for the provider if available
if hasattr(args, "quick_inventory") and args.quick_inventory:
run_provider_quick_inventory(provider, audit_info, args)
run_provider_quick_inventory(provider, audit_info, args.output_directory)
sys.exit()
# Execute checks
@@ -170,25 +161,10 @@ def prowler():
# Extract findings stats
stats = extract_findings_statistics(findings)
if args.slack:
if "SLACK_API_TOKEN" in os.environ and "SLACK_CHANNEL_ID" in os.environ:
_ = send_slack_message(
os.environ["SLACK_API_TOKEN"],
os.environ["SLACK_CHANNEL_ID"],
stats,
provider,
audit_info,
)
else:
logger.critical(
"Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack)."
)
sys.exit(1)
if args.output_modes:
for mode in args.output_modes:
# Close json file if exists
if "json" in mode:
if mode == "json" or mode == "json-asff":
close_json(
audit_output_options.output_filename, args.output_directory, mode
)
@@ -241,10 +217,6 @@ def prowler():
audit_output_options.output_directory,
)
# If custom checks were passed, remove the modules
if checks_folder:
remove_custom_checks_module(checks_folder, provider)
# If there are failed findings exit code 3, except if -z is input
if not args.ignore_exit_code_3 and stats["total_fail"] > 0:
sys.exit(3)
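
Editor's note: as the last hunk shows, Prowler exits with code 3 when any finding fails unless -z/--ignore-exit-code-3 is passed. A minimal sketch of a wrapper that distinguishes that soft failure from hard errors (the prowler invocation is an assumption about the caller's environment):

import subprocess
import sys

result = subprocess.run(["prowler", "aws"], check=False)
if result.returncode == 3:
    # Scan completed; some checks reported FAIL findings.
    print("Prowler finished with failed findings.")
elif result.returncode != 0:
    # Anything else is a real execution error: propagate it.
    sys.exit(result.returncode)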


@@ -362,14 +362,14 @@
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_rotate_access_key_90_days",
"iam_no_root_access_key",
"iam_user_mfa_enabled_console_access",
"iam_root_hardware_mfa_enabled",
"iam_password_policy_minimum_length_14",
"iam_disable_90_days_credentials",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{


@@ -1,79 +0,0 @@
{
"Framework": "AWS-Well-Architected-Framework-Reliability-Pillar",
"Version": "",
"Provider": "AWS",
"Description": "Best Practices for the AWS Well-Architected Framework Reliability Pillar encompasses the ability of a workload to perform its intended function correctly and consistently when its expected to. This includes the ability to operate and test the workload through its total lifecycle.",
"Requirements": [
{
"Id": "REL09-BP03",
"Description": "Configure backups to be taken automatically based on a periodic schedule informed by the Recovery Point Objective (RPO), or by changes in the dataset. Critical datasets with low data loss requirements need to be backed up automatically on a frequent basis, whereas less critical data where some loss is acceptable can be backed up less frequently.",
"Attributes": [
{
"Name": "REL09-BP03 Perform data backup automatically",
"WellArchitectedQuestionId": "backing-up-data",
"WellArchitectedPracticeId": "rel_backing_up_data_automated_backups_data",
"Section": "Failure management",
"SubSection": "Backup up data",
"LevelOfRisk": "High",
"AssessmentMethod": "Automated",
"Description": "Configure backups to be taken automatically based on a periodic schedule informed by the Recovery Point Objective (RPO), or by changes in the dataset. Critical datasets with low data loss requirements need to be backed up automatically on a frequent basis, whereas less critical data where some loss is acceptable can be backed up less frequently.",
"ImplementationGuidanceUrl": "https://docs.aws.amazon.com/wellarchitected/latest/reliability-pillar/rel_backing_up_data_automated_backups_data.html#implementation-guidance"
}
],
"Checks": [
"cloudformation_stacks_termination_protection_enabled",
"rds_instance_backup_enabled",
"rds_instance_deletion_protection",
"dynamodb_tables_pitr_enabled"
]
},
{
"Id": "REL06-BP01",
"Description": "Monitor components and services of AWS workload effectifely, using tools like Amazon CloudWatch and AWS Health Dashboard. Define relevant metrics, set thresholds, and analyze metrics and logs for early detection of issues.",
"Attributes": [
{
"Name": "REL06-BP01 Monitor all components for the workload (Generation)",
"WellArchitectedQuestionId": "monitor-aws-resources",
"WellArchitectedPracticeId": "rel_monitor_aws_resources_monitor_resources",
"Section": "Change management",
"SubSection": "Monitor workload resources",
"LevelOfRisk": "High",
"AssessmentMethod": "Automated",
"Description": "Monitor components and services of AWS workload effectifely, using tools like Amazon CloudWatch and AWS Health Dashboard. Define relevant metrics, set thresholds, and analyze metrics and logs for early detection of issues.",
"ImplementationGuidanceUrl": "https://docs.aws.amazon.com/wellarchitected/latest/reliability-pillar/rel_monitor_aws_resources_monitor_resources.html#implementation-guidance"
}
],
"Checks": [
"apigateway_logging_enabled",
"apigatewayv2_access_logging_enabled",
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elb_logging_enabled",
"opensearch_service_domains_audit_logging_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"rds_instance_enhanced_monitoring_enabled",
"rds_instance_integration_cloudwatch_logs"
]
},
{
"Id": "REL10-BP01",
"Description": "Distribute workload data and resources across multiple Availability Zones or, where necessary, across AWS Regions. These locations can be as diverse as required.",
"Attributes": [
{
"Name": "REL10-BP01 Deploy the workload to multiple locations",
"WellArchitectedQuestionId": "fault-isolation",
"WellArchitectedPracticeId": "rel_fault_isolation_multiaz_region_system",
"Section": "Failure management",
"SubSection": "Use fault isolation to protect your workload",
"LevelOfRisk": "High",
"AssessmentMethod": "Automated",
"Description": "Distribute workload data and resources across multiple Availability Zones or, where necessary, across AWS Regions. These locations can be as diverse as required.",
"ImplementationGuidanceUrl": "https://docs.aws.amazon.com/wellarchitected/latest/reliability-pillar/use-fault-isolation-to-protect-your-workload.html#implementation-guidance."
}
],
"Checks": [
"rds_instance_multi_az"
]
}
]
}
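
Editor's note: the removed file above follows the common compliance-framework layout: a Framework header plus a list of Requirements, each carrying Attributes and the Prowler Checks that satisfy it. A minimal sketch of reading such a file and listing the checks per requirement (the file name and path are assumed, based on the framework name shown):

import json

# Assumed path; compliance files live under prowler/compliance/<provider>/.
with open("aws_well_architected_framework_reliability_pillar_aws.json") as f:
    framework = json.load(f)

print(framework["Framework"], framework["Provider"])
for requirement in framework["Requirements"]:
    print(f'{requirement["Id"]}: {", ".join(requirement["Checks"])}')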


@@ -155,8 +155,7 @@
"Id": "1.16",
"Description": "Ensure IAM policies that allow full \"*:*\" administrative privileges are not attached",
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
],
"Attributes": [
{


@@ -155,8 +155,7 @@
"Id": "1.16",
"Description": "Ensure IAM policies that allow full \"*:*\" administrative privileges are not attached",
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
],
"Attributes": [
{


@@ -88,8 +88,7 @@
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -182,8 +181,7 @@
"Checks": [
"elbv2_ssl_listeners",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},


@@ -74,30 +74,6 @@
"iam_support_role_created"
]
},
{
"Id": "op.acc.3.r2.aws.iam.1",
"Description": "Privilegios de auditoría",
"Attributes": [
{
"IdGrupoControl": "op.acc.3.r2",
"Marco": "operacional",
"Categoria": "control de acceso",
"DescripcionControl": "Disponer de cuentas con privilegios de auditoría estrictamente controladas y personalizadas.",
"Nivel": "opcional",
"Tipo": "refuerzo",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"iam_securityaudit_role_created"
]
},
{
"Id": "op.acc.4.aws.iam.1",
"Description": "Proceso de gestión de derechos de acceso",
@@ -127,10 +103,7 @@
"awslambda_function_url_public",
"awslambda_function_url_cors_policy",
"iam_policy_allows_privilege_escalation",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"organizations_scp_check_deny_regions",
"organizations_account_part_of_organizations"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -156,8 +129,7 @@
"Checks": [
"iam_policy_allows_privilege_escalation",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -230,8 +202,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -535,31 +506,6 @@
"config_recorder_all_regions_enabled"
]
},
{
"Id": "op.exp.1.aws.cfg.2",
"Description": "Inventario de activos",
"Attributes": [
{
"IdGrupoControl": "op.exp.1",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Configurar una regla de Config Rules que alerte sobre el despliegue de recursos sin las etiquetas correspondientes asociadas.",
"Nivel": "bajo",
"Tipo": "recomendacion",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "op.exp.1.aws.sys.1",
"Description": "Inventario de activos",
@@ -586,81 +532,6 @@
"ssm_managed_compliant_patching"
]
},
{
"Id": "op.exp.1.aws.sys.2",
"Description": "Inventario de activos",
"Attributes": [
{
"IdGrupoControl": "op.exp.1",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Asignar metadatos personalizados a cada nodo administrado con información sobre el responsable del activo.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"organizations_tags_policies_enabled_and_attached"
]
},
{
"Id": "op.exp.1.aws.re.1",
"Description": "Inventario de activos",
"Attributes": [
{
"IdGrupoControl": "op.exp.1",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Se recomienda el uso de AWS Resource Explorer para la exploración de los recursos como instancias RDB, buckets S3o tablas de Amazon DynamoDB.",
"Nivel": "alto",
"Tipo": "recomendacion",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"resourceexplorer2_indexes_found"
]
},
{
"Id": "op.exp.1.aws.tag.1",
"Description": "Inventario de activos",
"Attributes": [
{
"IdGrupoControl": "op.exp.1",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Para la correcta identificación del responsable, asociar etiquetas para todos los activos.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"organizations_tags_policies_enabled_and_attached"
]
},
{
"Id": "op.exp.3.aws.cfg.1",
"Description": "Gestión de la configuración de seguridad",
@@ -762,31 +633,6 @@
"ec2_instance_managed_by_ssm"
]
},
{
"Id": "op.exp.4.r4.aws.insp.1",
"Description": "Monitorización continua",
"Attributes": [
{
"IdGrupoControl": "op.exp.4.r4",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Desplegar a nivel de sistema una estrategia de monitorización continua de amenazas y vulnerabilidades detallando: indicadores críticos de seguridad, política de aplicación de parches y criterios de revisión regular y excepcional de amenazas del sistema.",
"Nivel": "opcional",
"Tipo": "refuerzo",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"inspector2_findings_exist"
]
},
{
"Id": "op.exp.5.aws.ct.1",
"Description": "Gestión de cambios",
@@ -1112,8 +958,7 @@
"Checks": [
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_insights_exist"
"cloudtrail_s3_dataevents_read_enabled"
]
},
{
@@ -1220,8 +1065,7 @@
],
"Checks": [
"iam_policy_allows_privilege_escalation",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_custom_policy_permissive_role_assumption",
"iam_policy_attached_only_to_group_or_roles",
"iam_role_cross_service_confused_deputy_prevention"
@@ -1250,27 +1094,6 @@
"s3_bucket_policy_public_write_access"
]
},
{
"Id": "op.exp.8.r4.aws.ct.3",
"Description": "Control de acceso",
"Attributes": [
{
"IdGrupoControl": "op.exp.8.r4",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Activar el acceso por MFA al registro de actividad almacenado en los buckets de Amazon S3 dedicados para AWS CloudTrail.",
"Nivel": "alto",
"Tipo": "refuerzo",
"Dimensiones": [
"trazabilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"cloudtrail_bucket_requires_mfa_delete"
]
},
{
"Id": "op.exp.8.r4.aws.ct.4",
"Description": "Control de acceso",
@@ -1380,107 +1203,6 @@
"cloudtrail_multi_region_enabled"
]
},
{
"Id": "op.exp.9.aws.img.1",
"Description": "Registro de la gestión de incidentes",
"Attributes": [
{
"IdGrupoControl": "op.exp.9",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Habilitar AWS Incident Manager y AWS CloudTrail en todas las regiones con el fin de recopilar información para generar contenido prescriptivo para la creación de informes exigidos por la medida de seguridad.",
"Nivel": "alto",
"Tipo": "recomendacion",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ssmincidents_enabled_with_plans"
]
},
{
"Id": "op.exp.10.aws.tag.1",
"Description": "Protección de claves criptográficas",
"Attributes": [
{
"IdGrupoControl": "op.exp.10",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Se recomienda utilizar tags y alias para una mejor gestión y administración de las claves.",
"Nivel": "alto",
"Tipo": "recomendacion",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"organizations_tags_policies_enabled_and_attached"
]
},
{
"Id": "op.exp.10.aws.cmk.1",
"Description": "Protección de claves criptográficas",
"Attributes": [
{
"IdGrupoControl": "op.exp.10",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Los usuarios o roles con privilegios para la creación de claves deben ser diferentes a los que van a utilizar las claves para operaciones de cifrado.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"iam_policy_no_full_access_to_kms"
]
},
{
"Id": "op.exp.10.aws.cmk.2",
"Description": "Protección de claves criptográficas",
"Attributes": [
{
"IdGrupoControl": "op.exp.10",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Utilizar claves gestionadas por los clientes (CMK).",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"iam_policy_no_full_access_to_kms"
]
},
{
"Id": "op.exp.10.aws.cmk.3",
"Description": "Protección de claves criptográficas",
@@ -1656,31 +1378,6 @@
"guardduty_is_enabled"
]
},
{
"Id": "op.mon.1.aws.gd.3",
"Description": "Detección de intrusión",
"Attributes": [
{
"IdGrupoControl": "op.mon.1",
"Marco": "operacional",
"Categoria": "monitorización del sistema",
"DescripcionControl": "Todas las cuentas miembro deberán estar añadidas para la supervisión bajo la cuenta raíz.",
"Nivel": "alto",
"Tipo": "medida",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"guardduty_centrally_managed"
]
},
{
"Id": "op.mon.2.aws.sh.1",
"Description": "Sistema de métricas",
@@ -1831,31 +1528,6 @@
"securityhub_enabled"
]
},
{
"Id": "op.mon.3.r2.aws.insp.1",
"Description": "Análisis dinámico",
"Attributes": [
{
"IdGrupoControl": "op.mon.3.r2",
"Marco": "operacional",
"Categoria": "monitorización del sistema",
"DescripcionControl": "Utilizar la herramienta Inspector para la detección de posibles vulneerabilidades de las instancias EC2, las funciones Lambda y las imágenes de contenedor.",
"Nivel": "alto",
"Tipo": "refuerzo",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"inspector2_findings_exist"
]
},
{
"Id": "op.mon.3.r3.aws.gd.1",
"Description": "Ciberamenazas avanzadas",
@@ -1906,31 +1578,6 @@
"config_recorder_all_regions_enabled"
]
},
{
"Id": "op.mon.3.r6.aws.insp.1",
"Description": "Inspecciones de seguridad",
"Attributes": [
{
"IdGrupoControl": "op.mon.3.r6",
"Marco": "operacional",
"Categoria": "monitorización del sistema",
"DescripcionControl": "Utilizar Config Rules y AWS Inspector.",
"Nivel": "alto",
"Tipo": "refuerzo",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"inspector2_findings_exist"
]
},
{
"Id": "mp.com.1.aws.sg.1",
"Description": "Perímetro seguro",
@@ -2057,56 +1704,6 @@
"elb_insecure_ssl_ciphers"
]
},
{
"Id": "mp.com.1.aws.nfw.1",
"Description": "Perímetro seguro",
"Attributes": [
{
"IdGrupoControl": "mp.com.1",
"Marco": "medidas de protección",
"Categoria": "protección de las comunicaciones",
"DescripcionControl": "Filtrar todo el tráfico entrante y saliente de la VPC a través de Firewalls de red.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"networkfirewall_in_all_vpc"
]
},
{
"Id": "mp.com.1.aws.nfw.2",
"Description": "Perímetro seguro",
"Attributes": [
{
"IdGrupoControl": "mp.com.1",
"Marco": "medidas de protección",
"Categoria": "protección de las comunicaciones",
"DescripcionControl": "Incidir en la utilización de AWS Firewall Manager para gestionar los firewalls de forma centralizada.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"networkfirewall_in_all_vpc"
]
},
{
"Id": "mp.com.1.aws.s3.1",
"Description": "Perímetro seguro",
@@ -2245,31 +1842,6 @@
"cloudfront_distributions_https_enabled"
]
},
{
"Id": "mp.com.4.aws.vpc.1",
"Description": "Separación de flujos de información en la red",
"Attributes": [
{
"IdGrupoControl": "mp.com.4",
"Marco": "medidas de protección",
"Categoria": "protección de las comunicaciones",
"DescripcionControl": "Los flujos de información de red se deben separar a través de la utilización de diferentes subnets.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"vpc_subnet_separate_private_public"
]
},
{
"Id": "mp.com.4.aws.vpc.2",
"Description": "Separación de flujos de información en la red",
@@ -2295,31 +1867,6 @@
"ec2_instance_internet_facing_with_instance_profile"
]
},
{
"Id": "mp.com.4.r1.aws.vpc.1",
"Description": "Segmentación lógica avanzada",
"Attributes": [
{
"IdGrupoControl": "mp.com.4.r1",
"Marco": "medidas de protección",
"Categoria": "protección de las comunicaciones",
"DescripcionControl": "Implementar la segmentación a través de la utilización de diferentes VPCs.",
"Nivel": "alto",
"Tipo": "refuerzo",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"vpc_subnet_separate_private_public"
]
},
{
"Id": "mp.com.4.r2.aws.vpc.1",
"Description": "Segmentación lógica avanzada",
@@ -2345,28 +1892,6 @@
"vpc_peering_routing_tables_with_least_privilege"
]
},
{
"Id": "mp.com.4.r3.aws.vpc.1",
"Description": "Segmentación física",
"Attributes": [
{
"IdGrupoControl": "mp.com.4.r3",
"Marco": "medidas de protección",
"Categoria": "protección de las comunicaciones",
"DescripcionControl": "Implementar la segmentación a través de diferentes VPCs situadas en diferentes ubicaciones.",
"Nivel": "alto",
"Tipo": "refuerzo",
"Dimensiones": [
"confidencialidad",
"integridad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"vpc_subnet_different_az"
]
},
{
"Id": "mp.si.2.aws.kms.1",
"Description": "Criptografía",
@@ -2658,7 +2183,7 @@
"Dimensiones": [
"disponibilidad"
],
"ModoEjecucion": "automático"
"ModoEjecucion": "automáticoop.pl.2.aws.warch.1"
}
],
"Checks": [
@@ -2670,27 +2195,6 @@
"shield_advanced_protection_in_route53_hosted_zones"
]
},
{
"Id": "mp.s.4.aws.as.1",
"Description": "Protección frente a la denegación de servicio ",
"Attributes": [
{
"IdGrupoControl": "mp.s.4",
"Marco": "medidas de protección",
"Categoria": "protección de los servicios",
"DescripcionControl": "Activar la solución AWS Auto Scaling para dotar a los sistemas de la capacidad suficiente para atender la carga prevista con holgura y desplegar tecnologías para la prevención de ataques conocidos.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"autoscaling_group_multiple_az"
]
},
{
"Id": "op.pl.2.aws.warch.1",
"Description": "Sistema de gestión",
@@ -3957,27 +3461,6 @@
],
"Checks": []
},
{
"Id": "op.cont.3.aws.drs.1",
"Description": "Pruebas periódicas",
"Attributes": [
{
"IdGrupoControl": "op.cont.3",
"Marco": "operacional",
"Categoria": "continuidad del servicio",
"DescripcionControl": "La organización puede hacer uso del servicio AWS Elastic Disaster Recovery, programando y ejecutando pruebas no disruptivas (simulacros que no afectan ni al servidor de origen ni a la replicación de datos en curso) que prueben el correcto funcionamiento de las recuperaciones del plan de continuidad.",
"Nivel": "alto",
"Tipo": "recomendacion",
"Dimensiones": [
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"drs_job_exist"
]
},
{
"Id": "op.mon.1.aws.gd.4",
"Description": "Detección de intrusión",
@@ -4144,50 +3627,6 @@
],
"Checks": []
},
{
"Id": "mp.info.6.aws.bcku.1",
"Description": "Copias de seguridad",
"Attributes": [
{
"IdGrupoControl": "mp.info.6",
"Marco": "medidas de protección",
"Categoria": "protección de la información",
"DescripcionControl": "Para los procedimientos de respaldo de cualquiera de los dos entornos (local y nube) y siempre y cuando se utilicen recursos compatibles en el entorno local, la entidad puede hacer uso de AWS Backup, que permite elaboración de planes de respaldo y la definición de reglas de frecuencia, ciclo de vida, lugar de almacenamiento y etiquetado de las copias de seguridad.",
"Nivel": "alto",
"Tipo": "recomendacion",
"Dimensiones": [
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"backup_plans_exist",
"backup_vaults_exist",
"backup_reportplans_exist"
]
},
{
"Id": "mp.info.6.aws.tag.1",
"Description": "Copias de seguridad",
"Attributes": [
{
"IdGrupoControl": "mp.info.6",
"Marco": "medidas de protección",
"Categoria": "protección de la información",
"DescripcionControl": "Los planes de respaldo se pueden integrar con AWS Tags, acotando con base en las etiquetas de los recursos el alcance de cada proceso de copiado.",
"Nivel": "alto",
"Tipo": "recomendacion",
"Dimensiones": [
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"organizations_tags_policies_enabled_and_attached"
]
},
{
"Id": "mp.info.6.r2.aws.bcku.1",
"Description": "Protección de las copias de seguridad",


@@ -26,9 +26,9 @@
"opensearch_service_domains_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -58,9 +58,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",


@@ -20,8 +20,7 @@
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -92,9 +91,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -145,9 +144,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -189,9 +188,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -250,9 +249,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -270,8 +269,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -291,8 +290,7 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -976,8 +974,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{


@@ -418,9 +418,9 @@
],
"Checks": [
"ec2_instance_profile_attached",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -519,8 +519,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -536,8 +536,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -553,8 +553,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key"
@@ -579,8 +579,8 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_rotate_access_key_90_days",
@@ -755,9 +755,9 @@
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{


@@ -35,8 +35,7 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",


@@ -82,9 +82,9 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -166,9 +166,9 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",


@@ -45,8 +45,7 @@
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -115,8 +114,7 @@
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -171,8 +169,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -204,8 +201,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -259,8 +255,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -276,8 +271,7 @@
],
"Checks": [
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials",
@@ -518,8 +512,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_user_mfa_enabled_console_access",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -21,8 +21,7 @@
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -61,8 +60,7 @@
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -127,8 +125,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -145,8 +142,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -163,8 +159,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -180,8 +175,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -455,8 +449,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_managed_by_ssm",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_url_public",
"rds_snapshots_public_access",
@@ -828,8 +821,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{


@@ -101,8 +101,7 @@
"cloudtrail_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -126,8 +125,7 @@
],
"Checks": [
"ec2_ebs_public_snapshot",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_url_public",
@@ -182,8 +180,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -218,8 +215,7 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_url_public",
@@ -850,8 +846,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{


@@ -19,8 +19,7 @@
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -161,8 +160,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -240,8 +238,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -277,9 +274,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -351,8 +348,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -380,8 +376,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -409,8 +404,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -438,8 +432,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -467,8 +460,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -496,8 +488,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -525,8 +516,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -554,8 +544,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -582,8 +571,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -611,8 +599,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -645,8 +632,7 @@
"iam_no_root_access_key",
"iam_root_mfa_enabled",
"iam_root_hardware_mfa_enabled",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_password_policy_minimum_length_14",
"ec2_instance_imdsv2_enabled"
@@ -669,8 +655,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -698,8 +683,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -727,8 +711,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -758,8 +741,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -789,8 +771,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -841,8 +822,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -886,8 +866,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -915,8 +894,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -944,8 +922,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -1069,8 +1046,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -1094,8 +1070,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -1115,8 +1091,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -1144,8 +1119,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -1163,8 +1138,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -1203,8 +1177,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -1458,8 +1432,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -2633,9 +2606,9 @@
"ec2_instance_profile_attached",
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -2713,8 +2686,7 @@
"ec2_instance_profile_attached",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -2969,8 +2941,7 @@
"ec2_ebs_default_encryption",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -3941,8 +3912,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -5413,8 +5383,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -5457,8 +5426,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",


@@ -569,8 +569,7 @@
],
"Checks": [
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials",
@@ -625,8 +624,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -1078,8 +1076,7 @@
],
"Checks": [
"ec2_ebs_public_snapshot",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_url_public",
"rds_snapshots_public_access",


@@ -156,8 +156,7 @@
],
"Checks": [
"iam_no_root_access_key",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access",


@@ -113,11 +113,9 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},


@@ -46,8 +46,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_disable_90_days_credentials"
]
},
@@ -312,8 +311,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

File diff suppressed because one or more lines are too long


@@ -1,8 +1,6 @@
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
### Account, Check and/or Region can be * to apply for all the cases
### Resources is a list that can have either Regex or Keywords
### Tags is an optional list containing tuples of 'key=value'
########################### ALLOWLIST EXAMPLE ###########################
Allowlist:
Accounts:
@@ -14,19 +12,14 @@ Allowlist:
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
- "test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
- "project=test"
"*":
Checks:
@@ -37,7 +30,7 @@ Allowlist:
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
- "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"*":
Regions:
- "*"
@@ -46,34 +39,6 @@ Allowlist:
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
"*":
Checks:
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
# EXAMPLE: CONTROL TOWER (to migrate)
# When using Control Tower, guardrails prevent access to certain protected resources. The allowlist
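For reference, a minimal sketch (not part of this diff) of the tag semantics described above: every pattern in a Tags list must match ("ANDed"), while alternation inside a single pattern ("project=test|project=stage") acts as OR. The function name and the flattened "key=value" tag string are assumptions for illustration; Prowler's actual matcher may differ.

import re

def tags_match(tag_patterns: list, resource_tags: str) -> bool:
    # All patterns must match (AND); regex alternation inside one
    # pattern provides the OR behavior.
    return all(re.search(pattern, resource_tags) for pattern in tag_patterns)

# True: 'test=test' is present AND one of 'project=test' OR 'project=stage' is.
print(tags_match(["test=test", "project=test|project=stage"],
                 "test=test project=stage env=dev"))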

View File

@@ -1,3 +1,4 @@
import json
import os
import pathlib
from datetime import datetime, timezone
@@ -10,30 +11,26 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.7.0"
boto3_user_agent_extra = "APN_1826889"
prowler_version = "3.3.3"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
azure_logo = "https://user-images.githubusercontent.com/38561120/235927375-b23e2e0f-8932-49ec-b59c-d89f61c8041d.png"
gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accdc-c226-4391-8e97-6ca86a91cf50.png"
orange_color = "\033[38;5;208m"
banner_color = "\033[1;92m"
# Compliance
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
compliance_aws_dir = f"{actual_directory}/../compliance/aws"
available_compliance_frameworks = []
for provider in ["aws", "gcp"]:
with os.scandir(f"{actual_directory}/../compliance/{provider}") as files:
files = [
file.name
for file in files
if file.is_file()
and file.name.endswith(".json")
and available_compliance_frameworks.append(file.name.removesuffix(".json"))
]
with os.scandir(compliance_aws_dir) as files:
files = [
file.name
for file in files
if file.is_file()
and file.name.endswith(".json")
and available_compliance_frameworks.append(file.name.removesuffix(".json"))
]
# AWS services-regions matrix json
aws_services_json_file = "aws_regions_by_service.json"
@@ -46,25 +43,22 @@ timestamp_iso = timestamp.isoformat(sep=" ", timespec="seconds")
csv_file_suffix = ".csv"
json_file_suffix = ".json"
json_asff_file_suffix = ".asff.json"
json_ocsf_file_suffix = ".ocsf.json"
html_file_suffix = ".html"
config_yaml = f"{pathlib.Path(os.path.dirname(os.path.realpath(__file__)))}/config.yaml"
def check_current_version():
def check_current_version(prowler_version):
try:
prowler_version_string = f"Prowler {prowler_version}"
release_response = requests.get(
"https://api.github.com/repos/prowler-cloud/prowler/tags"
)
latest_version = release_response.json()[0]["name"]
latest_version = json.loads(release_response)[0]["name"]
if latest_version != prowler_version:
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
return f"(latest is {latest_version}, upgrade for the latest features)"
else:
return f"{prowler_version_string} (it is the latest version, yay!)"
except Exception as error:
logger.error(f"{error.__class__.__name__}: {error}")
return f"{prowler_version_string}"
return "(it is the latest version, yay!)"
except Exception:
return ""
def change_config_var(variable, value):
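A side note on the hunk above: on the 3.3.3 side, json.loads(release_response) receives the requests Response object itself and would raise a TypeError at runtime; the 3.7.0 form release_response.json() is the one that actually parses the body. A standalone sketch of the same tag-based version check, with the output strings taken from the diff:

import requests

def latest_prowler_tag() -> str:
    # Same public GitHub tags API as the function above queries.
    response = requests.get(
        "https://api.github.com/repos/prowler-cloud/prowler/tags", timeout=10
    )
    return response.json()[0]["name"]

current = "3.3.3"
latest = latest_prowler_tag()
if latest == current:
    print(f"Prowler {current} (it is the latest version, yay!)")
else:
    print(f"Prowler {current} (latest is {latest}, upgrade for the latest features)")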

View File

@@ -1,8 +1,6 @@
import functools
import importlib
import os
import re
import shutil
import sys
import traceback
from pkgutil import walk_packages
@@ -26,7 +24,6 @@ except KeyError:
except Exception:
sys.exit(1)
import prowler
from prowler.lib.utils.utils import open_file, parse_json_file
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.outputs import Provider_Output_Options
@@ -93,9 +90,6 @@ def exclude_checks_to_run(checks_to_execute: set, excluded_checks: list) -> set:
def exclude_services_to_run(
checks_to_execute: set, excluded_services: list, provider: str
) -> set:
excluded_services = [
"awslambda" if service == "lambda" else service for service in excluded_services
]
# Recover checks from the input services
for service in excluded_services:
modules = recover_checks_from_provider(provider, service)
@@ -123,66 +117,6 @@ def parse_checks_from_file(input_file: str, provider: str) -> set:
return checks_to_execute
# Load checks from custom folder
def parse_checks_from_folder(audit_info, input_folder: str, provider: str) -> int:
try:
imported_checks = 0
# Check if input folder is a S3 URI
if provider == "aws" and re.search(
"^s3://([^/]+)/(.*?([^/]+))/$", input_folder
):
bucket = input_folder.split("/")[2]
key = ("/").join(input_folder.split("/")[3:])
s3_resource = audit_info.audit_session.resource("s3")
bucket = s3_resource.Bucket(bucket)
for obj in bucket.objects.filter(Prefix=key):
if not os.path.exists(os.path.dirname(obj.key)):
os.makedirs(os.path.dirname(obj.key))
bucket.download_file(obj.key, obj.key)
input_folder = key
# Import custom checks by moving the checks folders to the corresponding services
with os.scandir(input_folder) as checks:
for check in checks:
if check.is_dir():
check_module = input_folder + "/" + check.name
# Copy checks to specific provider/service folder
check_service = check.name.split("_")[0]
prowler_dir = prowler.__path__
prowler_module = f"{prowler_dir[0]}/providers/{provider}/services/{check_service}/{check.name}"
if os.path.exists(prowler_module):
shutil.rmtree(prowler_module)
shutil.copytree(check_module, prowler_module)
imported_checks += 1
return imported_checks
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
# Load checks from custom folder
def remove_custom_checks_module(input_folder: str, provider: str):
# Check if input folder is a S3 URI
s3_uri = False
if provider == "aws" and re.search("^s3://([^/]+)/(.*?([^/]+))/$", input_folder):
input_folder = ("/").join(input_folder.split("/")[3:])
s3_uri = True
with os.scandir(input_folder) as checks:
for check in checks:
if check.is_dir():
# Remove imported checks
check_service = check.name.split("_")[0]
prowler_dir = prowler.__path__
prowler_module = f"{prowler_dir[0]}/providers/{provider}/services/{check_service}/{check.name}"
if os.path.exists(prowler_module):
shutil.rmtree(prowler_module)
# If S3 URI, remove the downloaded folders
if s3_uri and os.path.exists(input_folder):
shutil.rmtree(input_folder)
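Both functions above rely on the same folder-style S3 URI convention; a small sketch of just the bucket/key split, mirroring the split("/") indexing used in parse_checks_from_folder:

import re

def split_s3_uri(uri: str) -> tuple:
    # Same pattern as above: a folder-style URI that ends in "/".
    if not re.search(r"^s3://([^/]+)/(.*?([^/]+))/$", uri):
        raise ValueError(f"not a folder-style S3 URI: {uri}")
    parts = uri.split("/")
    return parts[2], "/".join(parts[3:])  # bucket, key

print(split_s3_uri("s3://my-bucket/custom/checks/"))  # ('my-bucket', 'custom/checks/')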
def list_services(provider: str) -> set():
available_services = set()
checks_tuple = recover_checks_from_provider(provider)
@@ -196,12 +130,11 @@ def list_services(provider: str) -> set():
return sorted(available_services)
def list_categories(bulk_checks_metadata: dict) -> set():
def list_categories(provider: str, bulk_checks_metadata: dict) -> set():
available_categories = set()
for check in bulk_checks_metadata.values():
for cat in check.Categories:
if cat:
available_categories.add(cat)
available_categories.add(cat)
return available_categories
@@ -576,9 +509,6 @@ def update_audit_metadata(
def recover_checks_from_service(service_list: list, provider: str) -> list:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
modules = recover_checks_from_provider(provider, service)
if not modules:

View File

@@ -2,7 +2,10 @@ import sys
from pydantic import parse_obj_as
from prowler.lib.check.compliance_models import Compliance_Base_Model
from prowler.lib.check.compliance_models import (
Compliance_Base_Model,
Compliance_Requirement,
)
from prowler.lib.check.models import Check_Metadata_Model
from prowler.lib.logger import logger
@@ -19,7 +22,16 @@ def update_checks_metadata_with_compliance(
compliance_requirements = []
# Verify if check is in the requirement
if check in requirement.Checks:
# Include the requirement into the check's framework requirements
# Create the Compliance_Requirement
requirement = Compliance_Requirement(
Id=requirement.Id,
Description=requirement.Description,
Attributes=requirement.Attributes,
Checks=requirement.Checks,
)
# For the check metadata we don't need the "Checks" key
delattr(requirement, "Checks")
# Include the requirement into the check's framework requirements
compliance_requirements.append(requirement)
# Create the Compliance_Model
compliance = Compliance_Base_Model(

View File

@@ -8,8 +8,8 @@ from prowler.lib.logger import logger
# ENS - Esquema Nacional de Seguridad - España
class ENS_Requirement_Attribute_Nivel(str, Enum):
"""ENS V3 Requirement Attribute Level"""
class ENS_Requirements_Nivel(str, Enum):
"""ENS V3 Requirements Level"""
opcional = "opcional"
bajo = "bajo"
@@ -17,8 +17,8 @@ class ENS_Requirement_Attribute_Nivel(str, Enum):
alto = "alto"
class ENS_Requirement_Attribute_Dimensiones(str, Enum):
"""ENS V3 Requirement Attribute Dimensions"""
class ENS_Requirements_Dimensiones(str, Enum):
"""ENS V3 Requirements Dimensions"""
confidencialidad = "confidencialidad"
integridad = "integridad"
@@ -27,8 +27,8 @@ class ENS_Requirement_Attribute_Dimensiones(str, Enum):
disponibilidad = "disponibilidad"
class ENS_Requirement_Attribute_Tipos(str, Enum):
"""ENS Requirement Attribute Tipos"""
class ENS_Requirements_Tipos(str, Enum):
"""ENS Requirements Tipos"""
refuerzo = "refuerzo"
requisito = "requisito"
@@ -36,21 +36,21 @@ class ENS_Requirement_Attribute_Tipos(str, Enum):
medida = "medida"
class ENS_Requirement_Attribute(BaseModel):
"""ENS V3 Framework Requirement Attribute"""
class ENS_Requirements(BaseModel):
"""ENS V3 Framework Requirements"""
IdGrupoControl: str
Marco: str
Categoria: str
DescripcionControl: str
Tipo: ENS_Requirement_Attribute_Tipos
Nivel: ENS_Requirement_Attribute_Nivel
Dimensiones: list[ENS_Requirement_Attribute_Dimensiones]
Tipo: ENS_Requirements_Tipos
Nivel: ENS_Requirements_Nivel
Dimensiones: list[ENS_Requirements_Dimensiones]
# Generic Compliance Requirement Attribute
class Generic_Compliance_Requirement_Attribute(BaseModel):
"""Generic Compliance Requirement Attribute"""
# Generic Compliance Requirements
class Generic_Compliance_Requirements(BaseModel):
"""Generic Compliance Requirements"""
ItemId: str
Section: Optional[str]
@@ -60,27 +60,27 @@ class Generic_Compliance_Requirement_Attribute(BaseModel):
Soc_Type: Optional[str]
class CIS_Requirement_Attribute_Profile(str):
"""CIS Requirement Attribute Profile"""
class CIS_Requirements_Profile(str):
"""CIS Requirements Profile"""
Level_1 = "Level 1"
Level_2 = "Level 2"
class CIS_Requirement_Attribute_AssessmentStatus(str):
"""CIS Requirement Attribute Assessment Status"""
class CIS_Requirements_AssessmentStatus(str):
"""CIS Requirements Assessment Status"""
Manual = "Manual"
Automated = "Automated"
# CIS Requirement Attribute
class CIS_Requirement_Attribute(BaseModel):
"""CIS Requirement Attribute"""
# CIS Requirements
class CIS_Requirements(BaseModel):
"""CIS Requirements"""
Section: str
Profile: CIS_Requirement_Attribute_Profile
AssessmentStatus: CIS_Requirement_Attribute_AssessmentStatus
Profile: CIS_Requirements_Profile
AssessmentStatus: CIS_Requirements_AssessmentStatus
Description: str
RationaleStatement: str
ImpactStatement: str
@@ -90,71 +90,14 @@ class CIS_Requirement_Attribute(BaseModel):
References: str
# Well Architected Requirement Attribute
class AWS_Well_Architected_Requirement_Attribute(BaseModel):
"""AWS Well Architected Requirement Attribute"""
Name: str
WellArchitectedQuestionId: str
WellArchitectedPracticeId: str
Section: str
SubSection: Optional[str]
LevelOfRisk: str
AssessmentMethod: str
Description: str
ImplementationGuidanceUrl: str
# ISO27001 Requirement Attribute
class ISO27001_2013_Requirement_Attribute(BaseModel):
"""ISO27001 Requirement Attribute"""
Category: str
Objetive_ID: str
Objetive_Name: str
Check_Summary: str
# MITRE Requirement Attribute
class Mitre_Requirement_Attribute(BaseModel):
"""MITRE Requirement Attribute"""
AWSService: str
Category: str
Value: str
Comment: str
# MITRE Requirement
class Mitre_Requirement(BaseModel):
"""Mitre_Requirement holds the model for every MITRE requirement"""
Name: str
Id: str
Tactics: list[str]
SubTechniques: list[str]
Description: str
Platforms: list[str]
TechniqueURL: str
Attributes: list[Mitre_Requirement_Attribute]
Checks: list[str]
# Base Compliance Model
class Compliance_Requirement(BaseModel):
"""Compliance_Requirement holds the base model for every requirement within a compliance framework"""
Id: str
Description: str
Name: Optional[str]
Attributes: list[
Union[
CIS_Requirement_Attribute,
ENS_Requirement_Attribute,
Generic_Compliance_Requirement_Attribute,
ISO27001_2013_Requirement_Attribute,
AWS_Well_Architected_Requirement_Attribute,
]
Union[CIS_Requirements, ENS_Requirements, Generic_Compliance_Requirements]
]
Checks: list[str]
@@ -166,7 +109,7 @@ class Compliance_Base_Model(BaseModel):
Provider: str
Version: Optional[str]
Description: str
Requirements: list[Union[Mitre_Requirement, Compliance_Requirement]]
Requirements: list[Compliance_Requirement]
@root_validator(pre=True)
# noqa: F841 - since vulture raises unused variable 'cls'
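Both sides of this file feed the bundled framework JSON files into these models through pydantic's parse_obj_as (see the loader diff earlier). A hedged sketch of that flow; the file path is illustrative, but any file under prowler/compliance/aws/ should follow the Compliance_Base_Model shape:

from pydantic import parse_obj_as

from prowler.lib.check.compliance_models import Compliance_Base_Model
from prowler.lib.utils.utils import open_file, parse_json_file

framework_file = "prowler/compliance/aws/cis_1.5_aws.json"  # illustrative path
framework = parse_obj_as(
    Compliance_Base_Model, parse_json_file(open_file(framework_file))
)
print(framework.Framework, framework.Provider, len(framework.Requirements))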

View File

@@ -6,6 +6,7 @@ from prowler.config.config import (
available_compliance_frameworks,
check_current_version,
default_output_directory,
prowler_version,
)
from prowler.providers.aws.aws_provider import get_aws_available_regions
from prowler.providers.aws.lib.arn.arn import is_valid_arn
@@ -35,7 +36,8 @@ Detailed documentation at https://docs.prowler.cloud
self.parser.add_argument(
"-v",
"--version",
action="store_true",
action="version",
version=f"Prowler {prowler_version} {check_current_version(prowler_version)}",
help="show Prowler version",
)
# Common arguments parser
@@ -66,10 +68,6 @@ Detailed documentation at https://docs.prowler.cloud
if args:
sys.argv = args
if len(sys.argv) == 2 and sys.argv[1] in ("-v", "--version"):
print(check_current_version())
sys.exit(0)
# Set AWS as the default provider if no provider is supplied
if len(sys.argv) == 1:
sys.argv = self.__set_default_provider__(sys.argv)
@@ -124,8 +122,8 @@ Detailed documentation at https://docs.prowler.cloud
"--output-modes",
nargs="+",
help="Output modes, by default csv, html and json",
default=["csv", "json", "html", "json-ocsf"],
choices=["csv", "json", "json-asff", "html", "json-ocsf"],
default=["csv", "json", "html"],
choices=["csv", "json", "json-asff", "html"],
)
common_outputs_parser.add_argument(
"-F",
@@ -154,11 +152,6 @@ Detailed documentation at https://docs.prowler.cloud
common_outputs_parser.add_argument(
"-b", "--no-banner", action="store_true", help="Hide Prowler banner"
)
common_outputs_parser.add_argument(
"--slack",
action="store_true",
help="Send a summary of the execution with a Slack APP in your channel. Environment variables SLACK_API_TOKEN and SLACK_CHANNEL_ID are required (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack).",
)
def __init_logging_parser__(self):
# Logging Options
@@ -233,12 +226,6 @@ Detailed documentation at https://docs.prowler.cloud
default=[],
# Pending validate choices
)
common_checks_parser.add_argument(
"-x",
"--checks-folder",
nargs="?",
help="Specify external directory with custom checks (each check must have a folder with the required files, see more in https://docs.prowler.cloud/en/latest/tutorials/misc/#custom-checks).",
)
def __init_list_checks_parser__(self):
# List checks options
@@ -258,7 +245,7 @@ Detailed documentation at https://docs.prowler.cloud
list_group.add_argument(
"--list-compliance-requirements",
nargs="+",
help="List compliance requirements for a given compliance framework",
help="List compliance requirements for a given requirement",
choices=available_compliance_frameworks,
)
list_group.add_argument(
@@ -289,11 +276,6 @@ Detailed documentation at https://docs.prowler.cloud
help="ARN of the role to be assumed",
# Pending ARN validation
)
aws_auth_subparser.add_argument(
"--mfa",
action="store_true",
help="IAM entity enforces MFA so you need to input the MFA ARN and the TOTP",
)
aws_auth_subparser.add_argument(
"-T",
"--session-duration",
@@ -435,7 +417,7 @@ Detailed documentation at https://docs.prowler.cloud
azure_auth_modes_group.add_argument(
"--browser-auth",
action="store_true",
help="Use browser authentication to log in against Azure, --tenant-id is required for this option",
help="Use browser authentication to log in against azure ",
)
azure_auth_modes_group.add_argument(
"--managed-identity-auth",
@@ -448,13 +430,7 @@ Detailed documentation at https://docs.prowler.cloud
"--subscription-ids",
nargs="+",
default=[],
help="Azure Subscription IDs to be scanned by Prowler",
)
azure_parser.add_argument(
"--tenant-id",
nargs="?",
default=None,
help="Azure Tenant ID to be used with --browser-auth option",
help="Azure subscription ids to be scanned by prowler",
)
def __init_gcp_parser__(self):
@@ -471,11 +447,3 @@ Detailed documentation at https://docs.prowler.cloud
metavar="FILE_PATH",
help="Authenticate using a Google Service Account Application Credentials JSON file",
)
# Subscriptions
gcp_subscriptions_subparser = gcp_parser.add_argument_group("Projects")
gcp_subscriptions_subparser.add_argument(
"--project-ids",
nargs="+",
default=[],
help="GCP Project IDs to be scanned by Prowler",
)

View File

@@ -8,15 +8,10 @@ from prowler.config.config import orange_color, timestamp
from prowler.lib.check.models import Check_Report
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
Check_Output_CSV_AWS_CIS,
Check_Output_CSV_AWS_ISO27001_2013,
Check_Output_CSV_AWS_Well_Architected,
Check_Output_CSV_CIS,
Check_Output_CSV_ENS_RD2022,
Check_Output_CSV_GCP_CIS,
Check_Output_CSV_Generic_Compliance,
Check_Output_MITRE_ATTACK,
generate_csv_fields,
unroll_list,
)
@@ -30,10 +25,7 @@ def add_manual_controls(output_options, audit_info, file_descriptors):
manual_finding.status = "INFO"
manual_finding.status_extended = "Manual check"
manual_finding.resource_id = "manual_check"
manual_finding.resource_name = "Manual check"
manual_finding.region = ""
manual_finding.location = ""
manual_finding.project_id = ""
fill_compliance(
output_options, manual_finding, audit_info, file_descriptors
)
@@ -90,88 +82,16 @@ def fill_compliance(output_options, finding, audit_info, file_descriptors):
elif compliance.Framework == "CIS" and "cis_" in str(
output_options.output_modes
):
compliance_output = (
"cis_" + compliance.Version + "_" + compliance.Provider.lower()
)
# Only with the version of CIS that was selected
if compliance_output in str(output_options.output_modes):
if "cis_" + compliance.Version + "_aws" in str(
output_options.output_modes
):
compliance_output = "cis_" + compliance.Version + "_aws"
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
if compliance.Provider == "AWS":
compliance_row = Check_Output_CSV_AWS_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=audit_info.audited_account,
Region=finding.region,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(
Check_Output_CSV_AWS_CIS
)
elif compliance.Provider == "GCP":
compliance_row = Check_Output_CSV_GCP_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
ProjectId=finding.project_id,
Location=finding.location,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
ResourceName=finding.resource_name,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(
Check_Output_CSV_GCP_CIS
)
elif (
"AWS-Well-Architected-Framework" in compliance.Framework
and compliance.Provider == "AWS"
):
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
compliance_output = compliance_output.lower().replace("-", "_")
if compliance_output in output_options.output_modes:
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_AWS_Well_Architected(
compliance_row = Check_Output_CSV_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=audit_info.audited_account,
@@ -179,116 +99,23 @@ def fill_compliance(output_options, finding, audit_info, file_descriptors):
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Name=attribute.Name,
Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(
Check_Output_CSV_AWS_Well_Architected
)
elif (
compliance.Framework == "ISO27001"
and compliance.Version == "2013"
and compliance.Provider == "AWS"
):
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
compliance_output = compliance_output.lower().replace("-", "_")
if compliance_output in output_options.output_modes:
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
requirement_name = requirement.Name
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_AWS_ISO27001_2013(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=audit_info.audited_account,
Region=finding.region,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Name=requirement_name,
Requirements_Description=requirement_description,
Requirements_Attributes_Category=attribute.Category,
Requirements_Attributes_Objetive_ID=attribute.Objetive_ID,
Requirements_Attributes_Objetive_Name=attribute.Objetive_Name,
Requirements_Attributes_Check_Summary=attribute.Check_Summary,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(Check_Output_CSV_AWS_ISO27001_2013)
elif (
compliance.Framework == "MITRE-ATTACK"
and compliance.Version == ""
and compliance.Provider == "AWS"
):
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
compliance_output = compliance_output.lower().replace("-", "_")
if compliance_output in output_options.output_modes:
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
requirement_name = requirement.Name
attributes_aws_services = ""
attributes_categories = ""
attributes_values = ""
attributes_comments = ""
for attribute in requirement.Attributes:
attributes_aws_services += attribute.AWSService + "\n"
attributes_categories += attribute.Category + "\n"
attributes_values += attribute.Value + "\n"
attributes_comments += attribute.Comment + "\n"
compliance_row = Check_Output_MITRE_ATTACK(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=audit_info.audited_account,
Region=finding.region,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Name=requirement_name,
Requirements_Tactics=unroll_list(requirement.Tactics),
Requirements_SubTechniques=unroll_list(
requirement.SubTechniques
),
Requirements_Platforms=unroll_list(requirement.Platforms),
Requirements_TechniqueURL=requirement.TechniqueURL,
Requirements_Attributes_AWSServices=attributes_aws_services,
Requirements_Attributes_Categories=attributes_categories,
Requirements_Attributes_Values=attributes_values,
Requirements_Attributes_Comments=attributes_comments,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(Check_Output_MITRE_ATTACK)
csv_header = generate_csv_fields(Check_Output_CSV_CIS)
else:
compliance_output = compliance.Framework
@@ -402,8 +229,8 @@ def display_compliance_table(
marcos[marco_categoria]["Bajo"] += 1
# Add results to table
for marco in sorted(marcos):
ens_compliance_table["Proveedor"].append(compliance.Provider)
for marco in marcos:
ens_compliance_table["Proveedor"].append("aws")
ens_compliance_table["Marco/Categoria"].append(marco)
ens_compliance_table["Estado"].append(marcos[marco]["Estado"])
ens_compliance_table["Opcional"].append(
@@ -448,7 +275,7 @@ def display_compliance_table(
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
elif "cis_" in compliance_framework:
elif "cis_1." in compliance_framework:
sections = {}
cis_compliance_table = {
"Provider": [],
@@ -495,7 +322,7 @@ def display_compliance_table(
# Add results to table
sections = dict(sorted(sections.items()))
for section in sections:
cis_compliance_table["Provider"].append(compliance.Provider)
cis_compliance_table["Provider"].append("aws")
cis_compliance_table["Section"].append(section)
if sections[section]["Level 1"]["FAIL"] > 0:
cis_compliance_table["Level 1"].append(
@@ -543,77 +370,6 @@ def display_compliance_table(
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
elif "mitre_attack" in compliance_framework:
tactics = {}
mitre_compliance_table = {
"Provider": [],
"Tactic": [],
"Status": [],
}
pass_count = fail_count = 0
for finding in findings:
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if (
"MITRE-ATTACK" in compliance.Framework
and compliance.Version in compliance_framework
):
compliance_fm = compliance.Framework
for requirement in compliance.Requirements:
for tactic in requirement.Tactics:
if tactic not in tactics:
tactics[tactic] = {"FAIL": 0, "PASS": 0}
if finding.status == "FAIL":
fail_count += 1
tactics[tactic]["FAIL"] += 1
elif finding.status == "PASS":
pass_count += 1
tactics[tactic]["PASS"] += 1
# Add results to table
tactics = dict(sorted(tactics.items()))
for tactic in tactics:
mitre_compliance_table["Provider"].append(compliance.Provider)
mitre_compliance_table["Tactic"].append(tactic)
if tactics[tactic]["FAIL"] > 0:
mitre_compliance_table["Status"].append(
f"{Fore.RED}FAIL({tactics[tactic]['FAIL']}){Style.RESET_ALL}"
)
else:
mitre_compliance_table["Status"].append(
f"{Fore.GREEN}PASS({tactics[tactic]['PASS']}){Style.RESET_ALL}"
)
if fail_count + pass_count < 1:
print(
f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL}.\n"
)
else:
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Framework:"
)
overview_table = [
[
f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
print(
f"\nFramework {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Results:"
)
print(
tabulate(
mitre_compliance_table, headers="keys", tablefmt="rounded_grid"
)
)
print(
f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}"
)
print(f"\nDetailed results of {compliance_fm} are in:")
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
else:
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(

View File

@@ -7,20 +7,15 @@ from prowler.config.config import (
html_file_suffix,
json_asff_file_suffix,
json_file_suffix,
json_ocsf_file_suffix,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.html import add_html_header
from prowler.lib.outputs.models import (
Aws_Check_Output_CSV,
Azure_Check_Output_CSV,
Check_Output_CSV_AWS_CIS,
Check_Output_CSV_AWS_ISO27001_2013,
Check_Output_CSV_AWS_Well_Architected,
Check_Output_CSV_CIS,
Check_Output_CSV_ENS_RD2022,
Check_Output_CSV_GCP_CIS,
Check_Output_CSV_Generic_Compliance,
Check_Output_MITRE_ATTACK,
Gcp_Check_Output_CSV,
generate_csv_fields,
)
@@ -49,7 +44,7 @@ def initialize_file_descriptor(
"a",
)
if output_mode in ("json", "json-asff", "json-ocsf"):
if output_mode in ("json", "json-asff"):
file_descriptor.write("[")
elif "html" in output_mode:
add_html_header(file_descriptor, audit_info)
@@ -105,30 +100,6 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "json-ocsf":
filename = (
f"{output_directory}/{output_filename}{json_ocsf_file_suffix}"
)
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "html":
filename = f"{output_directory}/{output_filename}{html_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
elif isinstance(audit_info, GCP_Audit_Info):
if output_mode == "cis_2.0_gcp":
filename = f"{output_directory}/{output_filename}_cis_2.0_gcp{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_GCP_CIS
)
file_descriptors.update({output_mode: file_descriptor})
elif isinstance(audit_info, AWS_Audit_Info):
if output_mode == "json-asff":
filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
@@ -137,6 +108,15 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "html":
filename = (
f"{output_directory}/{output_filename}{html_file_suffix}"
)
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "ens_rd2022_aws":
filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
@@ -150,60 +130,14 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
elif output_mode == "cis_1.5_aws":
filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "cis_1.4_aws":
filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS
)
file_descriptors.update({output_mode: file_descriptor})
elif (
output_mode
== "aws_well_architected_framework_security_pillar_aws"
):
filename = f"{output_directory}/{output_filename}_aws_well_architected_framework_security_pillar_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_CSV_AWS_Well_Architected,
)
file_descriptors.update({output_mode: file_descriptor})
elif (
output_mode
== "aws_well_architected_framework_reliability_pillar_aws"
):
filename = f"{output_directory}/{output_filename}_aws_well_architected_framework_reliability_pillar_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_CSV_AWS_Well_Architected,
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "iso27001_2013_aws":
filename = f"{output_directory}/{output_filename}_iso27001_2013_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_CSV_AWS_ISO27001_2013,
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "mitre_attack_aws":
filename = f"{output_directory}/{output_filename}_mitre_attack_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_MITRE_ATTACK,
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
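The filename logic in this section reduces to a suffix lookup for the generic modes plus a compliance-framework infix for the compliance CSV modes. A compact sketch of just the naming, using the suffixes defined in config.py earlier in this diff (the helper name and the infix rule are generalizations, not Prowler's code):

suffixes = {"csv": ".csv", "json": ".json", "json-asff": ".asff.json", "html": ".html"}

def output_path(directory: str, base: str, mode: str) -> str:
    # Compliance modes such as "cis_1.5_aws" become an infix on a CSV file.
    if mode in suffixes:
        return f"{directory}/{base}{suffixes[mode]}"
    return f"{directory}/{base}_{mode}.csv"

print(output_path("/tmp", "prowler-output", "json-asff"))    # /tmp/prowler-output.asff.json
print(output_path("/tmp", "prowler-output", "cis_1.5_aws"))  # /tmp/prowler-output_cis_1.5_aws.csv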

View File

@@ -1,4 +1,3 @@
import importlib
import sys
from os import path
@@ -9,7 +8,6 @@ from prowler.config.config import (
prowler_version,
timestamp,
)
from prowler.lib.check.models import Check_Report_AWS, Check_Report_GCP
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
get_check_compliance,
@@ -18,13 +16,18 @@ from prowler.lib.outputs.models import (
unroll_tags,
)
from prowler.lib.utils.utils import open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
def add_html_header(file_descriptor, audit_info):
try:
if not audit_info.profile:
audit_info.profile = "ENV"
if isinstance(audit_info.audited_regions, list):
audited_regions = " ".join(audit_info.audited_regions)
elif not audit_info.audited_regions:
audited_regions = "All Regions"
else:
audited_regions = audit_info.audited_regions
file_descriptor.write(
"""
<!DOCTYPE html>
@@ -111,9 +114,51 @@ def add_html_header(file_descriptor, audit_info):
</li>
</ul>
</div>
</div> """
+ get_assessment_summary(audit_info)
</div>
<div class="col-md-2">
<div class="card">
<div class="card-header">
AWS Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>AWS Account:</b> """
+ audit_info.audited_account
+ """
</li>
<li class="list-group-item">
<b>AWS-CLI Profile:</b> """
+ audit_info.profile
+ """
</li>
<li class="list-group-item">
<b>Audited Regions:</b> """
+ audited_regions
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
AWS Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>User Id:</b> """
+ audit_info.audited_user_id
+ """
</li>
<li class="list-group-item">
<b>Caller Identity ARN:</b>
"""
+ audit_info.audited_identity_arn
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-2">
<div class="card">
<div class="card-header">
@@ -160,44 +205,37 @@ def add_html_header(file_descriptor, audit_info):
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
def fill_html(file_descriptor, finding, output_options):
try:
row_class = "p-3 mb-2 bg-success-custom"
if finding.status == "INFO":
row_class = "table-info"
elif finding.status == "FAIL":
row_class = "table-danger"
elif finding.status == "WARNING":
row_class = "table-warning"
file_descriptor.write(
f"""
<tr class="{row_class}">
<td>{finding.status}</td>
<td>{finding.check_metadata.Severity}</td>
<td>{finding.check_metadata.ServiceName}</td>
<td>{finding.location if isinstance(finding, Check_Report_GCP) else finding.region if isinstance(finding, Check_Report_AWS) else ""}</td>
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
<td>{finding.check_metadata.CheckTitle}</td>
<td>{finding.resource_id.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>
<td>{finding.status_extended.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>
</tr>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
row_class = "p-3 mb-2 bg-success-custom"
if finding.status == "INFO":
row_class = "table-info"
elif finding.status == "FAIL":
row_class = "table-danger"
elif finding.status == "WARNING":
row_class = "table-warning"
file_descriptor.write(
f"""
<tr class="{row_class}">
<td>{finding.status}</td>
<td>{finding.check_metadata.Severity}</td>
<td>{finding.check_metadata.ServiceName}</td>
<td>{finding.region}</td>
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
<td>{finding.check_metadata.CheckTitle}</td>
<td>{finding.resource_id.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>
<td>{finding.status_extended.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>
</tr>
"""
)
def fill_html_overview_statistics(stats, output_filename, output_directory):
@@ -333,215 +371,3 @@ def add_html_footer(output_filename, output_directory):
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_aws_html_assessment_summary(audit_info):
try:
if isinstance(audit_info, AWS_Audit_Info):
if not audit_info.profile:
audit_info.profile = "ENV"
if isinstance(audit_info.audited_regions, list):
audited_regions = " ".join(audit_info.audited_regions)
elif not audit_info.audited_regions:
audited_regions = "All Regions"
else:
audited_regions = audit_info.audited_regions
return (
"""
<div class="col-md-2">
<div class="card">
<div class="card-header">
AWS Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>AWS Account:</b> """
+ audit_info.audited_account
+ """
</li>
<li class="list-group-item">
<b>AWS-CLI Profile:</b> """
+ audit_info.profile
+ """
</li>
<li class="list-group-item">
<b>Audited Regions:</b> """
+ audited_regions
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
AWS Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>User Id:</b> """
+ audit_info.audited_user_id
+ """
</li>
<li class="list-group-item">
<b>Caller Identity ARN:</b> """
+ audit_info.audited_identity_arn
+ """
</li>
</ul>
</div>
</div>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_azure_html_assessment_summary(audit_info):
try:
if isinstance(audit_info, Azure_Audit_Info):
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
intermediate = key + " : " + value
printed_subscriptions.append(intermediate)
# Check if the identity is a str (coming from SP auth) or a dict (coming from browser auth)
if isinstance(audit_info.identity.identity_id, dict):
html_identity = audit_info.identity.identity_id.get(
"userPrincipalName", "Identity not found"
)
else:
html_identity = audit_info.identity.identity_id
return (
"""
<div class="col-md-2">
<div class="card">
<div class="card-header">
Azure Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>Azure Tenant IDs:</b> """
+ " ".join(audit_info.identity.tenant_ids)
+ """
</li>
<li class="list-group-item">
<b>Azure Tenant Domain:</b> """
+ audit_info.identity.domain
+ """
</li>
<li class="list-group-item">
<b>Azure Subscriptions:</b> """
+ " ".join(printed_subscriptions)
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
Azure Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>Azure Identity Type:</b> """
+ audit_info.identity.identity_type
+ """
</li>
<li class="list-group-item">
<b>Azure Identity ID:</b> """
+ html_identity
+ """
</li>
</ul>
</div>
</div>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_gcp_html_assessment_summary(audit_info):
try:
if isinstance(audit_info, GCP_Audit_Info):
try:
getattr(audit_info.credentials, "_service_account_email")
profile = (
audit_info.credentials._service_account_email
if audit_info.credentials._service_account_email is not None
else "default"
)
except AttributeError:
profile = "default"
return (
"""
<div class="col-md-2">
<div class="card">
<div class="card-header">
GCP Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>GCP Project IDs:</b> """
+ ", ".join(audit_info.project_ids)
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
GCP Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>GCP Account:</b> """
+ profile
+ """
</li>
</ul>
</div>
</div>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_assessment_summary(audit_info):
"""
get_assessment_summary gets the HTML assessment summary for the provider
"""
try:
# This is based in the Provider_Audit_Info class
# It is not pretty but useful
# AWS_Audit_Info --> aws
# GCP_Audit_Info --> gcp
# Azure_Audit_Info --> azure
provider = audit_info.__class__.__name__.split("_")[0].lower()
# Dynamically get the Provider assessment summary handler
provider_html_assessment_summary_function = (
f"get_{provider}_html_assessment_summary"
)
return getattr(
importlib.import_module(__name__), provider_html_assessment_summary_function
)(audit_info)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
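The getattr/importlib dispatch in get_assessment_summary is a convention-over-configuration pattern that recurs in the outputs code below. A toy, self-contained version of the same idea (the classes and handlers here are stand-ins, not Prowler's):

import importlib

class AWS_Audit_Info: ...
class GCP_Audit_Info: ...

def get_aws_html_assessment_summary(info):
    return "<div>AWS summary</div>"

def get_gcp_html_assessment_summary(info):
    return "<div>GCP summary</div>"

def get_assessment_summary(audit_info):
    # AWS_Audit_Info -> "aws", then look up get_aws_html_assessment_summary
    provider = audit_info.__class__.__name__.split("_")[0].lower()
    handler = getattr(
        importlib.import_module(__name__), f"get_{provider}_html_assessment_summary"
    )
    return handler(audit_info)

print(get_assessment_summary(GCP_Audit_Info()))  # <div>GCP summary</div>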

View File

@@ -4,299 +4,70 @@ import sys
from prowler.config.config import (
json_asff_file_suffix,
json_file_suffix,
json_ocsf_file_suffix,
prowler_version,
timestamp,
timestamp_utc,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
Account,
Check_Output_JSON_OCSF,
Cloud,
Compliance,
Compliance_OCSF,
Feature,
Finding,
Group,
Metadata,
Organization,
Product,
ProductFields,
Remediation_OCSF,
Resource,
Resources,
Severity,
get_check_compliance,
unroll_dict_to_list,
)
from prowler.lib.utils.utils import hash_sha512, open_file
def fill_json_asff(finding_output, audit_info, finding, output_options):
try:
# Check if there are no resources in the finding
if finding.resource_arn == "":
if finding.resource_id == "":
finding.resource_id = "NONE_PROVIDED"
finding.resource_arn = finding.resource_id
finding_output.Id = f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{hash_sha512(finding.resource_id)}"
finding_output.ProductArn = f"arn:{audit_info.audited_partition}:securityhub:{finding.region}::product/prowler/prowler"
finding_output.ProductFields = ProductFields(
ProviderVersion=prowler_version, ProwlerResourceName=finding.resource_arn
# Check if there are no resources in the finding
if finding.resource_arn == "":
if finding.resource_id == "":
finding.resource_id = "NONE_PROVIDED"
finding.resource_arn = finding.resource_id
finding_output.Id = f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{hash_sha512(finding.resource_id)}"
finding_output.ProductArn = f"arn:{audit_info.audited_partition}:securityhub:{finding.region}::product/prowler/prowler"
finding_output.ProductFields = ProductFields(
ProviderVersion=prowler_version, ProwlerResourceName=finding.resource_arn
)
finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID
finding_output.AwsAccountId = audit_info.audited_account
finding_output.Types = finding.check_metadata.CheckType
finding_output.FirstObservedAt = (
finding_output.UpdatedAt
) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
finding_output.Severity = Severity(Label=finding.check_metadata.Severity.upper())
finding_output.Title = finding.check_metadata.CheckTitle
finding_output.Description = finding.status_extended
finding_output.Resources = [
Resource(
Id=finding.resource_arn,
Type=finding.check_metadata.ResourceType,
Partition=audit_info.audited_partition,
Region=finding.region,
)
finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID
finding_output.AwsAccountId = audit_info.audited_account
finding_output.Types = finding.check_metadata.CheckType
finding_output.FirstObservedAt = (
finding_output.UpdatedAt
) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
finding_output.Severity = Severity(
Label=finding.check_metadata.Severity.upper()
)
finding_output.Title = finding.check_metadata.CheckTitle
# Description should NOT be longer than 1024 characters
finding_output.Description = (
(finding.status_extended[:1000] + "...")
if len(finding.status_extended) > 1000
else finding.status_extended
)
finding_output.Resources = [
Resource(
Id=finding.resource_arn,
Type=finding.check_metadata.ResourceType,
Partition=audit_info.audited_partition,
Region=finding.region,
)
]
# Iterate for each compliance framework
compliance_summary = []
associated_standards = []
check_compliance = get_check_compliance(finding, "aws", output_options)
for key, value in check_compliance.items():
if (
len(associated_standards) < 20
): # AssociatedStandards should NOT have more than 20 items
associated_standards.append({"StandardsId": key})
item = f"{key} {' '.join(value)}"
if len(item) > 64:
item = item[0:63]
compliance_summary.append(item)
]
# Iterate for each compliance framework
compliance_summary = []
associated_standards = []
check_compliance = get_check_compliance(finding, "aws", output_options)
for key, value in check_compliance.items():
associated_standards.append({"StandardsId": key})
item = f"{key} {' '.join(value)}"
if len(item) > 64:
item = item[0:63]
compliance_summary.append(item)
# Ensures finding_status matches allowed values in ASFF
finding_status = generate_json_asff_status(finding.status)
# Add ED to PASS or FAIL (PASSED/FAILED)
finding_output.Compliance = Compliance(
Status=finding.status + "ED",
AssociatedStandards=associated_standards,
RelatedRequirements=compliance_summary,
)
finding_output.Remediation = {
"Recommendation": finding.check_metadata.Remediation.Recommendation
}
finding_output.Compliance = Compliance(
Status=finding_status,
AssociatedStandards=associated_standards,
RelatedRequirements=compliance_summary,
)
# Fill Recommendation Url if it is blank
if not finding.check_metadata.Remediation.Recommendation.Url:
finding.check_metadata.Remediation.Recommendation.Url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
finding_output.Remediation = {
"Recommendation": finding.check_metadata.Remediation.Recommendation
}
return finding_output
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def generate_json_asff_status(status: str) -> str:
json_asff_status = ""
if status == "PASS":
json_asff_status = "PASSED"
elif status == "FAIL":
json_asff_status = "FAILED"
elif status == "WARNING":
json_asff_status = "WARNING"
else:
json_asff_status = "NOT_AVAILABLE"
return json_asff_status
def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCSF:
try:
resource_region = ""
resource_name = ""
resource_uid = ""
finding_uid = ""
project_uid = ""
resource_labels = finding.resource_tags if finding.resource_tags else []
aws_account_name = ""
aws_org_uid = ""
account = None
org = None
if (
hasattr(audit_info, "organizations_metadata")
and audit_info.organizations_metadata
):
aws_account_name = audit_info.organizations_metadata.account_details_name
aws_org_uid = audit_info.organizations_metadata.account_details_org
if finding.check_metadata.Provider == "aws":
account = Account(
name=aws_account_name,
uid=audit_info.audited_account,
)
org = Organization(
name=aws_org_uid,
uid=aws_org_uid,
)
resource_region = finding.region
resource_name = finding.resource_id
resource_uid = finding.resource_arn
finding_uid = f"prowler-{finding.check_metadata.Provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
elif finding.check_metadata.Provider == "azure":
account = Account(
name=finding.subscription,
uid=finding.subscription,
)
org = Organization(
name=audit_info.identity.domain,
uid=audit_info.identity.domain,
)
resource_name = finding.resource_name
resource_uid = finding.resource_id
finding_uid = f"prowler-{finding.check_metadata.Provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
elif finding.check_metadata.Provider == "gcp":
project_uid = finding.project_id
resource_region = finding.location
resource_name = finding.resource_name
resource_uid = finding.resource_id
finding_uid = f"prowler-{finding.check_metadata.Provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
cloud = Cloud(
provider=finding.check_metadata.Provider,
org=org,
account=account,
region=resource_region,
project_uid=project_uid,
)
finding_ocsf = Finding(
title=finding.check_metadata.CheckTitle,
uid=finding_uid,
desc=finding.check_metadata.Description,
supporting_data={
"Risk": finding.check_metadata.Risk,
"Notes": finding.check_metadata.Notes,
},
related_events=finding.check_metadata.DependsOn
+ finding.check_metadata.RelatedTo,
remediation=Remediation_OCSF(
kb_articles=list(
filter(
None,
[
finding.check_metadata.Remediation.Code.NativeIaC,
finding.check_metadata.Remediation.Code.Terraform,
finding.check_metadata.Remediation.Code.CLI,
finding.check_metadata.Remediation.Code.Other,
finding.check_metadata.Remediation.Recommendation.Url,
],
)
),
desc=finding.check_metadata.Remediation.Recommendation.Text,
),
types=finding.check_metadata.CheckType,
src_url=finding.check_metadata.RelatedUrl,
)
resources = []
resources.append(
Resources(
group=Group(name=finding.check_metadata.ServiceName),
region=resource_region,
name=resource_name,
labels=resource_labels,
uid=resource_uid,
type=finding.check_metadata.ResourceType,
details=finding.resource_details,
)
)
metadata = Metadata(
product=Product(
feature=Feature(
uid=finding.check_metadata.CheckID,
name=finding.check_metadata.CheckID,
)
),
original_time=timestamp.isoformat(),
profiles=[audit_info.profile]
if hasattr(audit_info, "organizations_metadata")
else [],
)
compliance = Compliance_OCSF(
status=generate_json_ocsf_status(finding.status),
status_detail=finding.status_extended,
requirements=unroll_dict_to_list(
get_check_compliance(
finding, finding.check_metadata.Provider, output_options
)
),
)
finding_output = Check_Output_JSON_OCSF(
finding=finding_ocsf,
resources=resources,
status_detail=finding.status_extended,
message=finding.status_extended,
severity=finding.check_metadata.Severity.capitalize(),
severity_id=generate_json_ocsf_severity_id(finding.check_metadata.Severity),
status=generate_json_ocsf_status(finding.status),
status_id=generate_json_ocsf_status_id(finding.status),
compliance=compliance,
cloud=cloud,
time=timestamp.isoformat(),
metadata=metadata,
)
return finding_output
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def generate_json_ocsf_status(status: str):
json_ocsf_status = ""
if status == "PASS":
json_ocsf_status = "Success"
elif status == "FAIL":
json_ocsf_status = "Failure"
elif status == "WARNING":
json_ocsf_status = "Other"
else:
json_ocsf_status = "Unknown"
return json_ocsf_status
def generate_json_ocsf_status_id(status: str):
json_ocsf_status_id = 0
if status == "PASS":
json_ocsf_status_id = 1
elif status == "FAIL":
json_ocsf_status_id = 2
elif status == "WARNING":
json_ocsf_status_id = 99
else:
json_ocsf_status_id = 0
return json_ocsf_status_id
def generate_json_ocsf_severity_id(severity: str):
json_ocsf_severity_id = 0
if severity == "low":
json_ocsf_severity_id = 2
elif severity == "medium":
json_ocsf_severity_id = 3
elif severity == "high":
json_ocsf_severity_id = 4
elif severity == "critical":
json_ocsf_severity_id = 5
return json_ocsf_severity_id
return finding_output
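The three mapping helpers removed in this hunk (OCSF status string, status id, severity id) are plain if/elif ladders; a table-driven equivalent with the same values as the diff, shown only as a stylistic alternative:

OCSF_STATUS = {"PASS": ("Success", 1), "FAIL": ("Failure", 2), "WARNING": ("Other", 99)}
OCSF_SEVERITY_ID = {"low": 2, "medium": 3, "high": 4, "critical": 5}

def ocsf_status(status: str) -> tuple:
    # Unknown statuses map to ("Unknown", 0), matching the fallthrough above.
    return OCSF_STATUS.get(status, ("Unknown", 0))

print(ocsf_status("FAIL"))               # ('Failure', 2)
print(OCSF_SEVERITY_ID.get("high", 0))   # 4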
def close_json(output_filename, output_directory, mode):
@@ -305,8 +76,6 @@ def close_json(output_filename, output_directory, mode):
suffix = json_file_suffix
if mode == "json-asff":
suffix = json_asff_file_suffix
elif mode == "json-ocsf":
suffix = json_ocsf_file_suffix
filename = f"{output_directory}/{output_filename}{suffix}"
# Close JSON file if exists
if os.path.isfile(filename):

View File

@@ -1,39 +1,31 @@
import importlib
import sys
from csv import DictWriter
from datetime import datetime
from typing import Any, List, Literal, Optional
from typing import Any, List, Optional
from pydantic import BaseModel
from prowler.config.config import prowler_version, timestamp
from prowler.config.config import timestamp
from prowler.lib.check.models import Remediation
from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info
def get_check_compliance(finding, provider, output_options):
try:
check_compliance = {}
# We have to retrieve all the check's compliance requirements
if finding.check_metadata.CheckID in output_options.bulk_checks_metadata:
for compliance in output_options.bulk_checks_metadata[
finding.check_metadata.CheckID
].Compliance:
compliance_fw = compliance.Framework
if compliance.Version:
compliance_fw = f"{compliance_fw}-{compliance.Version}"
if compliance.Provider == provider.upper():
if compliance_fw not in check_compliance:
check_compliance[compliance_fw] = []
for requirement in compliance.Requirements:
check_compliance[compliance_fw].append(requirement.Id)
return check_compliance
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
check_compliance = {}
# We have to retrieve all the check's compliance requirements
for compliance in output_options.bulk_checks_metadata[
finding.check_metadata.CheckID
].Compliance:
compliance_fw = compliance.Framework
if compliance.Version:
compliance_fw = f"{compliance_fw}-{compliance.Version}"
if compliance.Provider == provider.upper():
if compliance_fw not in check_compliance:
check_compliance[compliance_fw] = []
for requirement in compliance.Requirements:
check_compliance[compliance_fw].append(requirement.Id)
return check_compliance
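The 3.3.3 version above drops the try/except and the bulk_checks_metadata membership guard, but builds the same framework-to-requirements map. A hedged sketch of its output, using SimpleNamespace stand-ins for the finding and output_options objects it expects (check ID, framework and requirement values below are hypothetical):

from types import SimpleNamespace

requirement = SimpleNamespace(Id="2.1.1")
compliance = SimpleNamespace(
    Framework="CIS", Version="1.5", Provider="AWS", Requirements=[requirement]
)
finding = SimpleNamespace(
    check_metadata=SimpleNamespace(CheckID="s3_bucket_public_access")
)
output_options = SimpleNamespace(
    bulk_checks_metadata={
        "s3_bucket_public_access": SimpleNamespace(Compliance=[compliance])
    }
)

print(get_check_compliance(finding, "aws", output_options))
# {'CIS-1.5': ['2.1.1']}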
def generate_provider_output_csv(
@@ -43,6 +35,8 @@ def generate_provider_output_csv(
set_provider_output_options configures automatically the outputs based on the selected provider and returns the Provider_Output_Options object.
"""
try:
finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
output_model = getattr(importlib.import_module(__name__), finding_output_model)
# Dynamically load the Provider_Output_Options class
finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
output_model = getattr(importlib.import_module(__name__), finding_output_model)
@@ -229,18 +223,6 @@ def unroll_dict(dict: dict):
return unrolled_items
def unroll_dict_to_list(dict: dict):
list = []
for key, value in dict.items():
if type(value) == list:
value = ", ".join(value)
list.append(f"{key}: {value}")
else:
list.append(f"{key}: {value}")
return list
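The removed unroll_dict_to_list flattened a compliance map into "key: value" strings, joining list values with commas; for example (made-up framework names):

print(unroll_dict_to_list({"CIS-1.5": ["2.1.1", "2.1.2"], "ENS-RD2022": "op.acc.1"}))
# ['CIS-1.5: 2.1.1, 2.1.2', 'ENS-RD2022: op.acc.1']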
def parse_html_string(str: str):
string = ""
for elem in str.split(" | "):
@@ -265,7 +247,7 @@ def parse_json_tags(tags: list):
def generate_csv_fields(format: Any) -> list[str]:
"""Generates the CSV headers for the given class"""
csv_fields = []
# __fields__ is always available in the Pydantic's BaseModel class
# __fields__ is always available in the Pydantic's BaseModel class
for field in format.__dict__.get("__fields__").keys():
csv_fields.append(field)
return csv_fields
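Since generate_csv_fields walks Pydantic's __fields__ registry, headers come out in declaration order. A quick illustration with a hypothetical model:

from pydantic import BaseModel

class ExampleRow(BaseModel):  # hypothetical, for illustration only
    Provider: str
    AccountId: str
    Status: str

print(generate_csv_fields(ExampleRow))  # ['Provider', 'AccountId', 'Status']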
@@ -376,7 +358,7 @@ def generate_provider_output_json(
)
if provider == "gcp":
finding_output.ProjectId = finding.project_id
finding_output.ProjectId = audit_info.project_id
finding_output.Location = finding.location
finding_output.ResourceId = finding.resource_id
finding_output.ResourceName = finding.resource_name
@@ -487,33 +469,6 @@ class Gcp_Check_Output_JSON(Check_Output_JSON):
super().__init__(**metadata)
class Check_Output_MITRE_ATTACK(BaseModel):
"""
Check_Output_MITRE_ATTACK generates a finding's output in CSV MITRE ATTACK format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Name: str
Requirements_Description: str
Requirements_Tactics: str
Requirements_SubTechniques: str
Requirements_Platforms: str
Requirements_TechniqueURL: str
Requirements_Attributes_AWSServices: str
Requirements_Attributes_Categories: str
Requirements_Attributes_Values: str
Requirements_Attributes_Comments: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
class Check_Output_CSV_ENS_RD2022(BaseModel):
"""
Check_Output_CSV_ENS_RD2022 generates a finding's output in CSV ENS RD2022 format.
@@ -539,7 +494,7 @@ class Check_Output_CSV_ENS_RD2022(BaseModel):
CheckId: str
class Check_Output_CSV_AWS_CIS(BaseModel):
class Check_Output_CSV_CIS(BaseModel):
"""
Check_Output_CSV_CIS generates a finding's output in CSV CIS format.
"""
@@ -567,35 +522,6 @@ class Check_Output_CSV_AWS_CIS(BaseModel):
CheckId: str
class Check_Output_CSV_GCP_CIS(BaseModel):
"""
Check_Output_CSV_CIS generates a finding's output in CSV CIS format.
"""
Provider: str
Description: str
ProjectId: str
Location: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: str
Requirements_Attributes_Profile: str
Requirements_Attributes_AssessmentStatus: str
Requirements_Attributes_Description: str
Requirements_Attributes_RationaleStatement: str
Requirements_Attributes_ImpactStatement: str
Requirements_Attributes_RemediationProcedure: str
Requirements_Attributes_AuditProcedure: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_References: str
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
class Check_Output_CSV_Generic_Compliance(BaseModel):
"""
Check_Output_CSV_Generic_Compliance generates a finding's output in CSV Generic Compliance format.
@@ -619,51 +545,6 @@ class Check_Output_CSV_Generic_Compliance(BaseModel):
CheckId: str
class Check_Output_CSV_AWS_Well_Architected(BaseModel):
"""
Check_Output_CSV_AWS_Well_Architected generates a finding's output in CSV AWS Well Architected Compliance format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Attributes_Name: str
Requirements_Attributes_WellArchitectedQuestionId: str
Requirements_Attributes_WellArchitectedPracticeId: str
Requirements_Attributes_Section: str
Requirements_Attributes_SubSection: Optional[str]
Requirements_Attributes_LevelOfRisk: str
Requirements_Attributes_AssessmentMethod: str
Requirements_Attributes_Description: str
Requirements_Attributes_ImplementationGuidanceUrl: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
class Check_Output_CSV_AWS_ISO27001_2013(BaseModel):
"""
Check_Output_CSV_AWS_ISO27001_2013 generates a finding's output in CSV AWS ISO27001 Compliance format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Attributes_Category: str
Requirements_Attributes_Objetive_ID: str
Requirements_Attributes_Objetive_Name: str
Requirements_Attributes_Check_Summary: str
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
# JSON ASFF Output
class ProductFields(BaseModel):
ProviderName: str = "Prowler"
@@ -710,115 +591,3 @@ class Check_Output_JSON_ASFF(BaseModel):
Resources: List[Resource] = None
Compliance: Compliance = None
Remediation: dict = None
# JSON OCSF
class Remediation_OCSF(BaseModel):
kb_articles: List[str]
desc: str
class Finding(BaseModel):
title: str
desc: str
supporting_data: dict
remediation: Remediation_OCSF
types: List[str]
src_url: str
uid: str
related_events: List[str]
class Group(BaseModel):
name: str
class Resources(BaseModel):
group: Group
region: str
name: str
uid: str
labels: list
type: str
details: str
class Compliance_OCSF(BaseModel):
status: str
requirements: List[str]
status_detail: str
class Account(BaseModel):
name: str
uid: str
class Organization(BaseModel):
uid: str
name: str
class Cloud(BaseModel):
account: Optional[Account]
region: str
org: Optional[Organization]
provider: str
project_uid: str
class Feature(BaseModel):
name: str
uid: str
version: str = prowler_version
class Product(BaseModel):
language: str = "en"
name: str = "Prowler"
version: str = prowler_version
vendor_name: str = "Prowler/ProwlerPro"
feature: Feature
class Metadata(BaseModel):
original_time: str
profiles: List[str]
product: Product
version: str = "1.0.0-rc.3"
class Check_Output_JSON_OCSF(BaseModel):
"""
Check_Output_JSON_OCSF generates a finding's output in JSON OCSF format.
https://schema.ocsf.io/1.0.0-rc.3/classes/security_finding
"""
finding: Finding
resources: List[Resources]
status_detail: str
compliance: Compliance_OCSF
message: str
severity_id: Literal[0, 1, 2, 3, 4, 5, 6, 99]
severity: Literal[
"Informational", "Low", "Medium", "High", "Critical", "Fatal", "Other"
]
cloud: Cloud
time: datetime
metadata: Metadata
state_id: int = 0
state: str = "New"
status_id: Literal[0, 1, 2, 99]
status: Literal["Unknown", "Success", "Failure", "Other"]
type_uid: int = 200101
type_name: str = "Security Finding: Create"
impact_id: int = 0
impact: str = "Unknown"
confidence_id: int = 0
confidence: str = "Unknown"
activity_id: int = 1
activity_name: str = "Create"
category_uid: int = 2
category_name: str = "Findings"
class_uid: int = 2001
class_name: str = "Security Finding"

View File

@@ -9,14 +9,13 @@ from prowler.config.config import (
html_file_suffix,
json_asff_file_suffix,
json_file_suffix,
json_ocsf_file_suffix,
orange_color,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance import add_manual_controls, fill_compliance
from prowler.lib.outputs.file_descriptors import fill_file_descriptors
from prowler.lib.outputs.html import fill_html
from prowler.lib.outputs.json import fill_json_asff, fill_json_ocsf
from prowler.lib.outputs.json import fill_json_asff
from prowler.lib.outputs.models import (
Check_Output_JSON_ASFF,
generate_provider_output_csv,
@@ -86,24 +85,31 @@ def report(check_findings, output_options, audit_info):
if file_descriptors:
# If --quiet is set, only add failed findings to the outputs
if not (finding.status != "FAIL" and output_options.is_quiet):
if any(
compliance in output_options.output_modes
for compliance in available_compliance_frameworks
):
fill_compliance(
output_options,
finding,
audit_info,
file_descriptors,
)
add_manual_controls(
output_options,
audit_info,
file_descriptors,
)
# AWS specific outputs
if finding.check_metadata.Provider == "aws":
if any(
compliance in output_options.output_modes
for compliance in available_compliance_frameworks
):
fill_compliance(
output_options,
finding,
audit_info,
file_descriptors,
)
add_manual_controls(
output_options,
audit_info,
file_descriptors,
)
if "html" in file_descriptors:
fill_html(
file_descriptors["html"], finding, output_options
)
file_descriptors["html"].write("")
if "json-asff" in file_descriptors:
finding_output = Check_Output_JSON_ASFF()
fill_json_asff(
@@ -131,10 +137,6 @@ def report(check_findings, output_options, audit_info):
)
# Common outputs
if "html" in file_descriptors:
fill_html(file_descriptors["html"], finding, output_options)
file_descriptors["html"].write("")
if "csv" in file_descriptors:
csv_writer, finding_output = generate_provider_output_csv(
finding.check_metadata.Provider,
@@ -161,19 +163,6 @@ def report(check_findings, output_options, audit_info):
)
file_descriptors["json"].write(",")
if "json-ocsf" in file_descriptors:
finding_output = fill_json_ocsf(
audit_info, finding, output_options
)
json.dump(
finding_output.dict(),
file_descriptors["json-ocsf"],
indent=4,
default=str,
)
file_descriptors["json-ocsf"].write(",")
else: # No service resources in the whole account
color = set_report_color("INFO")
if output_options.verbose:
@@ -221,8 +210,6 @@ def send_to_s3_bucket(
filename = f"{output_filename}{json_file_suffix}"
elif output_mode == "json-asff":
filename = f"{output_filename}{json_asff_file_suffix}"
elif output_mode == "json-ocsf":
filename = f"{output_filename}{json_ocsf_file_suffix}"
elif output_mode == "html":
filename = f"{output_filename}{html_file_suffix}"
else: # Compliance output mode

View File

@@ -1,135 +0,0 @@
import sys
from slack_sdk import WebClient
from prowler.config.config import aws_logo, azure_logo, gcp_logo, square_logo_img
from prowler.lib.logger import logger
def send_slack_message(token, channel, stats, provider, audit_info):
try:
client = WebClient(token=token)
identity, logo = create_message_identity(provider, audit_info)
response = client.chat_postMessage(
username="Prowler",
icon_url=square_logo_img,
channel="#" + channel,
blocks=create_message_blocks(identity, logo, stats),
)
return response
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def create_message_identity(provider, audit_info):
try:
identity = ""
logo = aws_logo
if provider == "aws":
identity = f"AWS Account *{audit_info.audited_account}*"
elif provider == "gcp":
identity = f"GCP Projects *{', '.join(audit_info.project_ids)}*"
logo = gcp_logo
elif provider == "azure":
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
intermediate = "- *" + key + ": " + value + "*\n"
printed_subscriptions.append(intermediate)
identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
logo = azure_logo
return identity, logo
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def create_message_blocks(identity, logo, stats):
try:
blocks = [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"Hey there 👋 \n I'm *Prowler*, _the handy cloud security tool_ :cloud::key:\n\n I have just finished the security assessment on your {identity} with a total of *{stats['findings_count']}* findings.",
},
"accessory": {
"type": "image",
"image_url": logo,
"alt_text": "Provider Logo",
},
},
{"type": "divider"},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass']/stats['findings_count']*100,2)}%)\n",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail']/stats['findings_count']*100,2)}%)\n ",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:bar_chart: *{stats['resources_count']} Scanned Resources*\n",
},
},
{"type": "divider"},
{
"type": "context",
"elements": [
{
"type": "mrkdwn",
"text": f"Used parameters: `prowler {' '.join(sys.argv[1:])} `",
}
],
},
{"type": "divider"},
{
"type": "section",
"text": {"type": "mrkdwn", "text": "Join our Slack Community!"},
"accessory": {
"type": "button",
"text": {"type": "plain_text", "text": "Prowler :slack:"},
"url": "https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Feel free to contact us in our repo",
},
"accessory": {
"type": "button",
"text": {"type": "plain_text", "text": "Prowler :github:"},
"url": "https://github.com/prowler-cloud/prowler",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "See all the things you can do with ProwlerPro",
},
"accessory": {
"type": "button",
"text": {"type": "plain_text", "text": "Prowler Pro"},
"url": "https://prowler.pro",
},
},
]
return blocks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
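The removed Slack integration expects a stats dict carrying findings_count, total_pass, total_fail and resources_count; the pass/fail sections compute round(total/findings_count*100, 2). A hedged usage sketch within this module's context (the token, channel, numbers and audit_info are placeholders):

stats = {
    "findings_count": 150,
    "total_pass": 120,
    "total_fail": 30,
    "resources_count": 80,
}
blocks = create_message_blocks("AWS Account *123456789012*", aws_logo, stats)
print(blocks[2]["text"]["text"])  # "... *120 Passed findings* (80.0%) ..."
# send_slack_message("xoxb-<token>", "security-reports", stats, "aws", audit_info)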

View File

@@ -3,13 +3,6 @@ import sys
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import (
csv_file_suffix,
html_file_suffix,
json_asff_file_suffix,
json_file_suffix,
json_ocsf_file_suffix,
)
from prowler.lib.logger import logger
from prowler.providers.common.outputs import Provider_Output_Options
@@ -27,18 +20,15 @@ def display_summary_table(
entity_type = "Account"
audited_entities = audit_info.audited_account
elif provider == "azure":
if (
audit_info.identity.domain
!= "Unknown tenant domain (missing AAD permissions)"
):
if audit_info.identity.domain:
entity_type = "Tenant Domain"
audited_entities = audit_info.identity.domain
else:
entity_type = "Tenant ID/s"
audited_entities = " ".join(audit_info.identity.tenant_ids)
elif provider == "gcp":
entity_type = "Project ID/s"
audited_entities = ", ".join(audit_info.project_ids)
entity_type = "Project ID"
audited_entities = audit_info.project_id
if findings:
current = {
@@ -115,23 +105,13 @@ def display_summary_table(
)
print("\nDetailed results are in:")
if "html" in output_options.output_modes:
print(
f" - HTML: {output_directory}/{output_filename}{html_file_suffix}"
)
print(f" - HTML: {output_directory}/{output_filename}.html")
if "json-asff" in output_options.output_modes:
print(
f" - JSON-ASFF: {output_directory}/{output_filename}{json_asff_file_suffix}"
)
if "json-ocsf" in output_options.output_modes:
print(
f" - JSON-OCSF: {output_directory}/{output_filename}{json_ocsf_file_suffix}"
)
print(f" - JSON-ASFF: {output_directory}/{output_filename}.asff.json")
if "csv" in output_options.output_modes:
print(f" - CSV: {output_directory}/{output_filename}{csv_file_suffix}")
print(f" - CSV: {output_directory}/{output_filename}.csv")
if "json" in output_options.output_modes:
print(
f" - JSON: {output_directory}/{output_filename}{json_file_suffix}"
)
print(f" - JSON: {output_directory}/{output_filename}.json")
else:
print(

View File

@@ -4,7 +4,6 @@ import sys
import tempfile
from hashlib import sha512
from io import TextIOWrapper
from ipaddress import ip_address
from os.path import exists
from typing import Any
@@ -17,13 +16,10 @@ from prowler.lib.logger import logger
def open_file(input_file: str, mode: str = "r") -> TextIOWrapper:
try:
f = open(input_file, mode)
except OSError as ose:
if ose.strerror == "Too many open files":
logger.critical(
"Ooops! You reached your user session maximum open files. To solve this issue, increase the shell session limit by running this command `ulimit -n 4096`. For more info visit https://docs.prowler.cloud/en/latest/troubleshooting/"
)
else:
logger.critical(f"{input_file}: OSError[{ose.errno}] {ose.strerror}")
except OSError:
logger.critical(
"Ooops! You reached your user session maximum open files. To solve this issue, increase the shell session limit by running this command `ulimit -n 4096`. For more info visit https://docs.prowler.cloud/en/latest/troubleshooting/"
)
sys.exit(1)
except Exception as e:
logger.critical(
@@ -80,11 +76,3 @@ def detect_secrets_scan(data):
return detect_secrets_output[temp_data_file.name]
else:
return None
def validate_ip_address(ip_string):
try:
ip_address(ip_string)
return True
except ValueError:
return False
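The removed validate_ip_address is a thin wrapper over ipaddress.ip_address, so it accepts both IPv4 and IPv6 literals:

print(validate_ip_address("10.0.0.1"))     # True
print(validate_ip_address("2001:db8::1"))  # True (IPv6 parses too)
print(validate_ip_address("256.1.1.1"))    # False (octet out of range)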

View File

@@ -2,7 +2,7 @@ import os
import pathlib
import sys
from boto3 import client, session
from boto3 import session
from botocore.credentials import RefreshableCredentials
from botocore.session import get_session
@@ -25,8 +25,8 @@ class AWS_Provider:
def set_session(self, audit_info):
try:
# If we receive a filled credentials object, it comes from an assumed role, so renewal is needed
if audit_info.credentials:
# If we receive a filled credentials object, it comes from an assumed role, so renewal is needed
logger.info("Creating session for assumed role ...")
# From botocore we can use RefreshableCredentials class, which has an attribute (refresh_using)
# that needs to be a method without arguments that retrieves a new set of fresh credentials
@@ -36,7 +36,7 @@ class AWS_Provider:
secret_key=audit_info.credentials.aws_secret_access_key,
token=audit_info.credentials.aws_session_token,
expiry_time=audit_info.credentials.expiration,
refresh_using=self.refresh_credentials,
refresh_using=self.refresh,
method="sts-assume-role",
)
# Here we need the botocore session since it needs to use refreshable credentials
@@ -52,43 +52,15 @@ class AWS_Provider:
# If we do not receive credentials, start the session using the profile
else:
logger.info("Creating session for not assumed identity ...")
# Input MFA only if a role is not going to be assumed
if audit_info.mfa_enabled and not audit_info.assumed_role_info.role_arn:
mfa_ARN, mfa_TOTP = input_role_mfa_token_and_code()
get_session_token_arguments = {
"SerialNumber": mfa_ARN,
"TokenCode": mfa_TOTP,
}
sts_client = client("sts")
session_credentials = sts_client.get_session_token(
**get_session_token_arguments
)
return session.Session(
aws_access_key_id=session_credentials["Credentials"][
"AccessKeyId"
],
aws_secret_access_key=session_credentials["Credentials"][
"SecretAccessKey"
],
aws_session_token=session_credentials["Credentials"][
"SessionToken"
],
profile_name=audit_info.profile,
)
else:
return session.Session(
profile_name=audit_info.profile,
)
return session.Session(profile_name=audit_info.profile)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
logger.critical(f"{error.__class__.__name__} -- {error}")
sys.exit(1)
# Refresh credentials method using assume role
# This method is called by appending () to its name, so it cannot accept arguments
# https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L570
def refresh_credentials(self):
def refresh(self):
logger.info("Refreshing assumed credentials...")
response = assume_role(self.aws_session, self.role_info)
@@ -107,53 +79,57 @@ class AWS_Provider:
def assume_role(session: session.Session, assumed_role_info: AWS_Assume_Role) -> dict:
try:
assume_role_arguments = {
"RoleArn": assumed_role_info.role_arn,
"RoleSessionName": "ProwlerAsessmentSession",
"DurationSeconds": assumed_role_info.session_duration,
}
if assumed_role_info.external_id:
assume_role_arguments["ExternalId"] = assumed_role_info.external_id
if assumed_role_info.mfa_enabled:
mfa_ARN, mfa_TOTP = input_role_mfa_token_and_code()
assume_role_arguments["SerialNumber"] = mfa_ARN
assume_role_arguments["TokenCode"] = mfa_TOTP
# set the info to assume the role from the partition, account and role name
sts_client = session.client("sts")
assumed_credentials = sts_client.assume_role(**assume_role_arguments)
# If an external ID is set, pass it to the assume-role API call
if assumed_role_info.external_id:
assumed_credentials = sts_client.assume_role(
RoleArn=assumed_role_info.role_arn,
RoleSessionName="ProwlerAsessmentSession",
DurationSeconds=assumed_role_info.session_duration,
ExternalId=assumed_role_info.external_id,
)
# else assume the role without the external id
else:
assumed_credentials = sts_client.assume_role(
RoleArn=assumed_role_info.role_arn,
RoleSessionName="ProwlerProAsessmentSession",
DurationSeconds=assumed_role_info.session_duration,
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
logger.critical(f"{error.__class__.__name__} -- {error}")
sys.exit(1)
else:
return assumed_credentials
def input_role_mfa_token_and_code() -> tuple[str]:
"""input_role_mfa_token_and_code ask for the AWS MFA ARN and TOTP and returns it."""
mfa_ARN = input("Enter ARN of MFA: ")
mfa_TOTP = input("Enter MFA code: ")
return (mfa_ARN.strip(), mfa_TOTP.strip())
def generate_regional_clients(
service: str, audit_info: AWS_Audit_Info, global_service: bool = False
) -> dict:
try:
regional_clients = {}
service_regions = get_available_aws_service_regions(service, audit_info)
# Get json locally
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
data = parse_json_file(f)
# Check if it is a subservice
json_regions = data["services"][service]["regions"][
audit_info.audited_partition
]
if audit_info.audited_regions: # Check for input aws audit_info.audited_regions
regions = list(
set(json_regions).intersection(audit_info.audited_regions)
) # Get common regions between input and json
else: # Get all regions from json of the service and partition
regions = json_regions
# Check if it is a global service, to gather only one region
if global_service:
if service_regions:
if audit_info.profile_region in service_regions:
service_regions = [audit_info.profile_region]
service_regions = service_regions[:1]
for region in service_regions:
if regions:
if audit_info.profile_region in regions:
regions = [audit_info.profile_region]
regions = regions[:1]
for region in regions:
regional_client = audit_info.audit_session.client(
service, region_name=region, config=audit_info.session_config
)
@@ -188,7 +164,7 @@ def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
checks_from_arn = set()
# If there are audit resources, only their services are executed
if audit_resources:
services_without_subservices = ["guardduty", "kms", "s3", "elb", "efs"]
services_without_subservices = ["guardduty", "kms", "s3", "elb"]
service_list = set()
sub_service_list = set()
for resource in audit_resources:
@@ -199,10 +175,8 @@ def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
# Parse services when they are different in the ARNs
if service == "lambda":
service = "awslambda"
elif service == "elasticloadbalancing":
if service == "elasticloadbalancing":
service = "elb"
elif service == "elasticfilesystem":
service = "efs"
elif service == "logs":
service = "cloudwatch"
# Check if Prowler has checks in service
@@ -230,6 +204,7 @@ def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
sub_service_list.add(sub_service)
else:
sub_service_list.add(service)
checks = recover_checks_from_service(service_list, provider)
# Filter only checks with audited subservices
@@ -252,46 +227,3 @@ def get_regions_from_audit_resources(audit_resources: list) -> list:
if audited_regions:
return audited_regions
return None
def get_available_aws_service_regions(service: str, audit_info: AWS_Audit_Info) -> list:
# Get json locally
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
data = parse_json_file(f)
# Check if it is a subservice
json_regions = data["services"][service]["regions"][audit_info.audited_partition]
if audit_info.audited_regions: # Check for input aws audit_info.audited_regions
regions = list(
set(json_regions).intersection(audit_info.audited_regions)
) # Get common regions between input and json
else: # Get all regions from json of the service and partition
regions = json_regions
return regions
def get_default_region(service: str, audit_info: AWS_Audit_Info) -> str:
"""get_default_region gets the default region based on the profile and audited service regions"""
service_regions = get_available_aws_service_regions(service, audit_info)
default_region = get_global_region(
audit_info
) # global region of the partition when all regions are audited and there is no profile region
if audit_info.profile_region in service_regions:
# return profile region only if it is audited
default_region = audit_info.profile_region
# return first audited region if specific regions are audited
elif audit_info.audited_regions:
default_region = audit_info.audited_regions[0]
return default_region
def get_global_region(audit_info: AWS_Audit_Info) -> str:
"""get_global_region gets the global region based on the audited partition"""
global_region = "us-east-1"
if audit_info.audited_partition == "aws-cn":
global_region = "cn-north-1"
elif audit_info.audited_partition == "aws-us-gov":
global_region = "us-gov-east-1"
elif "aws-iso" in audit_info.audited_partition:
global_region = "aws-iso-global"
return global_region
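get_global_region is a pure partition-to-region lookup, so it is easy to exercise with a stub (SimpleNamespace stands in for AWS_Audit_Info here):

from types import SimpleNamespace

for partition in ("aws", "aws-cn", "aws-us-gov", "aws-iso-b"):
    info = SimpleNamespace(audited_partition=partition)
    print(partition, "->", get_global_region(info))
# aws -> us-east-1, aws-cn -> cn-north-1, aws-us-gov -> us-gov-east-1,
# aws-iso-b -> aws-iso-global (matched by the "aws-iso" substring test)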

File diff suppressed because it is too large

View File

@@ -12,17 +12,7 @@ allowlist_schema = Schema(
"Accounts": {
str: {
"Checks": {
str: {
"Regions": list,
"Resources": list,
Optional("Tags"): list,
Optional("Exceptions"): {
Optional("Accounts"): list,
Optional("Regions"): list,
Optional("Resources"): list,
Optional("Tags"): list,
},
}
str: {"Regions": list, "Resources": list, Optional("Tags"): list}
}
}
}
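For reference, a document that satisfies the simpler 3.3.3 shape of this schema (the account ID, check name, ARN and tag below are placeholders) would look like:

example_allowlist = {
    "Accounts": {
        "123456789012": {
            "Checks": {
                "iam_user_hardware_mfa_enabled": {
                    "Regions": ["us-east-1", "*"],
                    "Resources": ["arn:aws:iam::123456789012:user/test-*"],
                    "Tags": ["environment=dev"],
                }
            }
        }
    }
}
allowlist_schema.validate(example_allowlist)  # raises SchemaError if malformed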
@@ -79,22 +69,25 @@ def parse_allowlist_file(audit_info, allowlist_file):
dynamodb_items.update(response["Items"])
for item in dynamodb_items:
# Create allowlist for every item
allowlist["Accounts"][item["Accounts"]] = {
"Checks": {
item["Checks"]: {
"Regions": item["Regions"],
"Resources": item["Resources"],
if "Tags" in item:
allowlist["Accounts"][item["Accounts"]] = {
"Checks": {
item["Checks"]: {
"Regions": item["Regions"],
"Resources": item["Resources"],
"Tags": item["Tags"],
}
}
}
else:
allowlist["Accounts"][item["Accounts"]] = {
"Checks": {
item["Checks"]: {
"Regions": item["Regions"],
"Resources": item["Resources"],
}
}
}
}
if "Tags" in item:
allowlist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][
"Tags"
] = item["Tags"]
if "Exceptions" in item:
allowlist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][
"Exceptions"
] = item["Exceptions"]
else:
with open(allowlist_file) as f:
allowlist = yaml.safe_load(f)["Allowlist"]
@@ -115,22 +108,19 @@ def parse_allowlist_file(audit_info, allowlist_file):
def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
try:
# By default it is not allowlisted
is_finding_allowlisted = False
# First set account key from allowlist dict
if audited_account in allowlist["Accounts"]:
account = audited_account
if is_allowlisted_in_check(
allowlist, audited_account, check, region, resource, tags
):
return True
# If there is a *, it affects all accounts
elif "*" in allowlist["Accounts"]:
account = "*"
# Test if it is allowlisted
allowlisted_checks = allowlist["Accounts"][account]["Checks"]
if is_allowlisted_in_check(
allowlisted_checks, audited_account, account, check, region, resource, tags
):
is_finding_allowlisted = True
return is_finding_allowlisted
if "*" in allowlist["Accounts"]:
audited_account = "*"
if is_allowlisted_in_check(
allowlist, audited_account, check, region, resource, tags
):
return True
return False
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
@@ -138,48 +128,22 @@ def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
sys.exit(1)
def is_allowlisted_in_check(
allowlisted_checks, audited_account, account, check, region, resource, tags
):
def is_allowlisted_in_check(allowlist, audited_account, check, region, resource, tags):
try:
# Default value is not allowlisted
is_check_allowlisted = False
for allowlisted_check, allowlisted_check_info in allowlisted_checks.items():
# map lambda to awslambda
allowlisted_check = re.sub("^lambda", "awslambda", allowlisted_check)
# extract the exceptions
exceptions = allowlisted_check_info.get("Exceptions")
# Check if there are exceptions
if is_excepted(
exceptions,
audited_account,
region,
resource,
tags,
# If there is a *, it affects all checks
if "*" in allowlist["Accounts"][audited_account]["Checks"]:
check = "*"
if is_allowlisted_in_region(
allowlist, audited_account, check, region, resource, tags
):
# Break loop and return default value since is excepted
break
allowlisted_regions = allowlisted_check_info.get("Regions")
allowlisted_resources = allowlisted_check_info.get("Resources")
allowlisted_tags = allowlisted_check_info.get("Tags")
# If there is a *, it affects all checks
if (
"*" == allowlisted_check
or check == allowlisted_check
or re.search(allowlisted_check, check)
return True
# Check if there is the specific check
if check in allowlist["Accounts"][audited_account]["Checks"]:
if is_allowlisted_in_region(
allowlist, audited_account, check, region, resource, tags
):
if is_allowlisted_in_region(
allowlisted_regions,
allowlisted_resources,
allowlisted_tags,
region,
resource,
tags,
):
is_check_allowlisted = True
return is_check_allowlisted
return True
return False
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
@@ -187,26 +151,32 @@ def is_allowlisted_in_check(
sys.exit(1)
def is_allowlisted_in_region(
allowlist_regions, allowlist_resources, allowlisted_tags, region, resource, tags
):
def is_allowlisted_in_region(allowlist, audited_account, check, region, resource, tags):
try:
# By default it is not allowlisted
is_region_allowlisted = False
# If there is a *, it affects all regions
if "*" in allowlist_regions or region in allowlist_regions:
for elem in allowlist_resources:
if "*" in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
for elem in allowlist["Accounts"][audited_account]["Checks"][check][
"Resources"
]:
if is_allowlisted_in_tags(
allowlisted_tags,
allowlist["Accounts"][audited_account]["Checks"][check],
elem,
resource,
tags,
):
is_region_allowlisted = True
# If we find the element, there is no point in continuing with the loop
break
return is_region_allowlisted
return True
# Check if there is the specific region
if region in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
for elem in allowlist["Accounts"][audited_account]["Checks"][check][
"Resources"
]:
if is_allowlisted_in_tags(
allowlist["Accounts"][audited_account]["Checks"][check],
elem,
resource,
tags,
):
return True
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
@@ -214,72 +184,24 @@ def is_allowlisted_in_region(
sys.exit(1)
def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags):
def is_allowlisted_in_tags(check_allowlist, elem, resource, tags):
try:
# By default it is not allowlisted
is_tag_allowlisted = False
# Check if it is an *
if elem == "*":
elem = ".*"
# Check if there are allowlisted tags
if allowlisted_tags:
for allowlisted_tag in allowlisted_tags:
if re.search(allowlisted_tag, tags):
is_tag_allowlisted = True
break
if "Tags" in check_allowlist:
# Check if there are resource tags
if tags:
tags_in_resource_tags = True
for tag in check_allowlist["Tags"]:
if tag not in tags:
tags_in_resource_tags = False
if tags_in_resource_tags and re.search(elem, resource):
return True
else:
if re.search(elem, resource):
is_tag_allowlisted = True
return is_tag_allowlisted
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
sys.exit(1)
def is_excepted(exceptions, audited_account, region, resource, tags):
try:
excepted = False
is_account_excepted = False
is_region_excepted = False
is_resource_excepted = False
is_tag_excepted = False
if exceptions:
excepted_accounts = exceptions.get("Accounts", [])
excepted_regions = exceptions.get("Regions", [])
excepted_resources = exceptions.get("Resources", [])
excepted_tags = exceptions.get("Tags", [])
if exceptions:
if audited_account in excepted_accounts:
is_account_excepted = True
if region in excepted_regions:
is_region_excepted = True
for excepted_resource in excepted_resources:
if re.search(excepted_resource, resource):
is_resource_excepted = True
for tag in excepted_tags:
if tag in tags:
is_tag_excepted = True
if (
(
(excepted_accounts and is_account_excepted)
or not excepted_accounts
)
and (
(excepted_regions and is_region_excepted)
or not excepted_regions
)
and (
(excepted_resources and is_resource_excepted)
or not excepted_resources
)
and ((excepted_tags and is_tag_excepted) or not excepted_tags)
):
excepted = True
return excepted
return True
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"

View File

@@ -1,51 +1,53 @@
import re
from arnparse import arnparse
from prowler.providers.aws.lib.arn.error import (
RoleArnParsingEmptyResource,
RoleArnParsingFailedMissingFields,
RoleArnParsingIAMRegionNotEmpty,
RoleArnParsingInvalidAccountID,
RoleArnParsingInvalidResourceType,
RoleArnParsingPartitionEmpty,
RoleArnParsingServiceNotIAMnorSTS,
RoleArnParsingServiceNotIAM,
)
from prowler.providers.aws.lib.arn.models import ARN
def parse_iam_credentials_arn(arn: str) -> ARN:
arn_parsed = ARN(arn)
# First check if region is empty (in IAM ARNs the region is always empty)
if arn_parsed.region:
raise RoleArnParsingIAMRegionNotEmpty
def arn_parsing(arn):
# check for number of fields, must be six
if len(arn.split(":")) != 6:
raise RoleArnParsingFailedMissingFields
else:
# check if needed fields are filled:
# - partition
# - service
# - account_id
# - resource_type
# - resource
if arn_parsed.partition is None or arn_parsed.partition == "":
raise RoleArnParsingPartitionEmpty
elif arn_parsed.service != "iam" and arn_parsed.service != "sts":
raise RoleArnParsingServiceNotIAMnorSTS
elif (
arn_parsed.account_id is None
or len(arn_parsed.account_id) != 12
or not arn_parsed.account_id.isnumeric()
):
raise RoleArnParsingInvalidAccountID
elif (
arn_parsed.resource_type != "role"
and arn_parsed.resource_type != "user"
and arn_parsed.resource_type != "assumed-role"
):
raise RoleArnParsingInvalidResourceType
elif arn_parsed.resource == "":
raise RoleArnParsingEmptyResource
arn_parsed = arnparse(arn)
# First check if region is empty (in IAM arns the region is always empty)
if arn_parsed.region is not None:
raise RoleArnParsingIAMRegionNotEmpty
else:
return arn_parsed
# check if needed fields are filled:
# - partition
# - service
# - account_id
# - resource_type
# - resource
if arn_parsed.partition is None:
raise RoleArnParsingPartitionEmpty
elif arn_parsed.service != "iam":
raise RoleArnParsingServiceNotIAM
elif (
arn_parsed.account_id is None
or len(arn_parsed.account_id) != 12
or not arn_parsed.account_id.isnumeric()
):
raise RoleArnParsingInvalidAccountID
elif arn_parsed.resource_type != "role":
raise RoleArnParsingInvalidResourceType
elif arn_parsed.resource == "":
raise RoleArnParsingEmptyResource
else:
return arn_parsed
def is_valid_arn(arn: str) -> bool:
"""is_valid_arn returns True or False whether the given AWS ARN (Amazon Resource Name) is valid or not."""
regex = r"^arn:aws(-cn|-us-gov|-iso|-iso-b)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/:]+(:\d+)?$"
regex = r"^arn:aws(-cn|-us-gov)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/]+(:\d+)?$"
return re.match(regex, arn) is not None
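The left-hand (3.7.0) regex additionally admits the -iso/-iso-b partitions and colons inside the resource path; some quick probes against that broader pattern (the sample ARNs are illustrative, and the middle two return False under the 3.3.3 regex):

for arn in (
    "arn:aws:iam::123456789012:role/prowler-role",           # valid for both versions
    "arn:aws-iso:s3:::my-bucket",                            # 3.7.0 pattern only
    "arn:aws:logs:us-east-1:123456789012:log-group:/aws/x",  # colon in resource: 3.7.0 only
    "arn:gcp:iam::123456789012:role/x",                      # invalid partition -> False
):
    print(arn, is_valid_arn(arn))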

View File

@@ -1,49 +1,43 @@
class RoleArnParsingFailedMissingFields(Exception):
# The ARN contains a number of fields different than six separated by ":"
# The arn contains a number of fields different than six separated by ":"
def __init__(self):
self.message = "The assumed role ARN contains an invalid number of fields separated by : or it does not start by arn, please input a valid ARN"
self.message = "The assumed role arn contains a number of fields different than six separated by :, please input a valid arn"
super().__init__(self.message)
class RoleArnParsingIAMRegionNotEmpty(Exception):
# The ARN contains a non-empty value for region, since it is an IAM ARN is not valid
# The arn contains a non-empty value for region, since it is an IAM arn is not valid
def __init__(self):
self.message = "The assumed role ARN contains a non-empty value for region, since it is an IAM ARN is not valid, please input a valid ARN"
self.message = "The assumed role arn contains a non-empty value for region, since it is an IAM arn is not valid, please input a valid arn"
super().__init__(self.message)
class RoleArnParsingPartitionEmpty(Exception):
# The ARN contains an empty value for partition
# The arn contains an empty value for partition
def __init__(self):
self.message = "The assumed role ARN does not contain a value for partition, please input a valid ARN"
self.message = "The assumed role arn does not contain a value for partition, please input a valid arn"
super().__init__(self.message)
class RoleArnParsingServiceNotIAMnorSTS(Exception):
class RoleArnParsingServiceNotIAM(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for service distinct than IAM or STS, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingServiceNotSTS(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for service distinct than STS, please input a valid ARN"
self.message = "The assumed role arn contains a value for service distinct than iam, please input a valid arn"
super().__init__(self.message)
class RoleArnParsingInvalidAccountID(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for account id empty or invalid, a valid account id must be composed of 12 numbers, please input a valid ARN"
self.message = "The assumed role arn contains a value for account id empty or invalid, a valid account id must be composed of 12 numbers, please input a valid arn"
super().__init__(self.message)
class RoleArnParsingInvalidResourceType(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for resource type different than role, please input a valid ARN"
self.message = "The assumed role arn contains a value for resource type different than role, please input a valid arn"
super().__init__(self.message)
class RoleArnParsingEmptyResource(Exception):
def __init__(self):
self.message = "The assumed role ARN does not contain a value for resource, please input a valid ARN"
self.message = "The assumed role arn does not contain a value for resource, please input a valid arn"
super().__init__(self.message)

View File

@@ -1,57 +0,0 @@
from typing import Optional
from pydantic import BaseModel
from prowler.providers.aws.lib.arn.error import RoleArnParsingFailedMissingFields
class ARN(BaseModel):
partition: str
service: str
region: Optional[str]  # IAM ARNs do not have a region
account_id: str
resource: str
resource_type: str
def __init__(self, arn):
# Validate the ARN
## Check that arn starts with arn
if not arn.startswith("arn:"):
raise RoleArnParsingFailedMissingFields
## Retrieve fields
arn_elements = arn.split(":", 5)
data = {
"partition": arn_elements[1],
"service": arn_elements[2],
"region": arn_elements[3] if arn_elements[3] != "" else None,
"account_id": arn_elements[4],
"resource": arn_elements[5],
"resource_type": get_arn_resource_type(arn, arn_elements[2]),
}
if "/" in data["resource"]:
data["resource"] = data["resource"].split("/", 1)[1]
elif ":" in data["resource"]:
data["resource"] = data["resource"].split(":", 1)[1]
# Calls Pydantic's BaseModel __init__
super().__init__(**data)
def get_arn_resource_type(arn, service):
if service == "s3":
resource_type = "bucket"
elif service == "sns":
resource_type = "topic"
elif service == "sqs":
resource_type = "queue"
elif service == "apigateway":
split_parts = arn.split(":")[5].split("/")
if "integration" in split_parts and "responses" in split_parts:
resource_type = "restapis-resources-methods-integration-response"
elif "documentation" in split_parts and "parts" in split_parts:
resource_type = "restapis-documentation-parts"
else:
resource_type = arn.split(":")[5].split("/")[1]
else:
resource_type = arn.split(":")[5].split("/")[0]
return resource_type
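A few illustrative calls to the removed helper (the ARNs are made up):

print(get_arn_resource_type("arn:aws:s3:::my-bucket", "s3"))                        # bucket
print(get_arn_resource_type("arn:aws:sqs:us-east-1:123456789012:my-queue", "sqs"))  # queue
print(get_arn_resource_type("arn:aws:iam::123456789012:role/prowler-role", "iam"))  # role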

View File

@@ -1,7 +1,6 @@
from boto3 import session
from botocore.config import Config
from prowler.config.config import boto3_user_agent_extra
from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
# Default Current Audit Info
@@ -13,12 +12,8 @@ current_audit_info = AWS_Audit_Info(
),
# Default standard retrier config
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
session_config=Config(
retries={"max_attempts": 3, "mode": "standard"},
user_agent_extra=boto3_user_agent_extra,
),
session_config=Config(retries={"max_attempts": 3, "mode": "standard"}),
audited_account=None,
audited_account_arn=None,
audited_user_id=None,
audited_partition=None,
audited_identity_arn=None,
@@ -29,9 +24,7 @@ current_audit_info = AWS_Audit_Info(
role_arn=None,
session_duration=None,
external_id=None,
mfa_enabled=None,
),
mfa_enabled=None,
audit_resources=None,
audited_regions=None,
organizations_metadata=None,

View File

@@ -19,7 +19,6 @@ class AWS_Assume_Role:
role_arn: str
session_duration: int
external_id: str
mfa_enabled: bool
@dataclass
@@ -38,14 +37,12 @@ class AWS_Audit_Info:
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
session_config: Config
audited_account: int
audited_account_arn: str
audited_identity_arn: str
audited_user_id: str
audited_partition: str
profile: str
profile_region: str
credentials: AWS_Credentials
mfa_enabled: bool
assumed_role_info: AWS_Assume_Role
audited_regions: list
audit_resources: list

View File

@@ -1,59 +0,0 @@
import sys
from boto3 import session
from colorama import Fore, Style
from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
AWS_STS_GLOBAL_ENDPOINT_REGION = "us-east-1"
def validate_aws_credentials(session: session, input_regions: list) -> dict:
try:
# For a valid STS GetCallerIdentity we have to use the right AWS Region
if input_regions is None or len(input_regions) == 0:
if session.region_name is not None:
aws_region = session.region_name
else:
# If there is no region passed with -f/--region
# we use the Global STS Endpoint Region, us-east-1
aws_region = AWS_STS_GLOBAL_ENDPOINT_REGION
else:
# Get the first region passed to the -f/--region
aws_region = input_regions[0]
validate_credentials_client = session.client("sts", aws_region)
caller_identity = validate_credentials_client.get_caller_identity()
# Include the region where the caller_identity has validated the credentials
caller_identity["region"] = aws_region
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
else:
return caller_identity
def print_aws_credentials(audit_info: AWS_Audit_Info):
# Beautify audited regions, set "all" if there is no filter region
regions = (
", ".join(audit_info.audited_regions)
if audit_info.audited_regions is not None
else "all"
)
# Beautify audited profile, set "default" if there is no profile set
profile = audit_info.profile if audit_info.profile is not None else "default"
report = f"""
This report is being generated using credentials below:
AWS-CLI Profile: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} AWS Filter Region: {Fore.YELLOW}[{regions}]{Style.RESET_ALL}
AWS Account: {Fore.YELLOW}[{audit_info.audited_account}]{Style.RESET_ALL} UserId: {Fore.YELLOW}[{audit_info.audited_user_id}]{Style.RESET_ALL}
Caller Identity ARN: {Fore.YELLOW}[{audit_info.audited_identity_arn}]{Style.RESET_ALL}
"""
# If -A is set, print Assumed Role ARN
if audit_info.assumed_role_info.role_arn is not None:
report += f"""Assumed Role ARN: {Fore.YELLOW}[{audit_info.assumed_role_info.role_arn}]{Style.RESET_ALL}
"""
print(report)

View File

@@ -1,40 +0,0 @@
import sys
from boto3 import client
from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info
def get_organizations_metadata(
metadata_account: str, assumed_credentials: dict
) -> AWS_Organizations_Info:
try:
organizations_client = client(
"organizations",
aws_access_key_id=assumed_credentials["Credentials"]["AccessKeyId"],
aws_secret_access_key=assumed_credentials["Credentials"]["SecretAccessKey"],
aws_session_token=assumed_credentials["Credentials"]["SessionToken"],
)
organizations_metadata = organizations_client.describe_account(
AccountId=metadata_account
)
list_tags_for_resource = organizations_client.list_tags_for_resource(
ResourceId=metadata_account
)
except Exception as error:
logger.critical(f"{error.__class__.__name__} -- {error}")
sys.exit(1)
else:
# Convert Tags dictionary to String
account_details_tags = ""
for tag in list_tags_for_resource["Tags"]:
account_details_tags += tag["Key"] + ":" + tag["Value"] + ","
organizations_info = AWS_Organizations_Info(
account_details_email=organizations_metadata["Account"]["Email"],
account_details_name=organizations_metadata["Account"]["Name"],
account_details_arn=organizations_metadata["Account"]["Arn"],
account_details_org=organizations_metadata["Account"]["Arn"].split("/")[1],
account_details_tags=account_details_tags,
)
return organizations_info
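Note that the tag flattening above always leaves a trailing comma; re-running the same loop over hypothetical tags shows the shape:

tags = [{"Key": "env", "Value": "prod"}, {"Key": "owner", "Value": "security"}]
account_details_tags = ""
for tag in tags:
    account_details_tags += tag["Key"] + ":" + tag["Value"] + ","
print(account_details_tags)  # env:prod,owner:security,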

View File

@@ -14,12 +14,10 @@ from prowler.config.config import (
output_file_timestamp,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.outputs import send_to_s3_bucket
from prowler.providers.aws.lib.arn.models import get_arn_resource_type
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
def quick_inventory(audit_info: AWS_Audit_Info, args):
def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
resources = []
global_resources = []
total_resources_per_region = {}
@@ -64,32 +62,27 @@ def quick_inventory(audit_info: AWS_Audit_Info, args):
)
# Get all the resources
resources_count = 0
try:
get_resources_paginator = client.get_paginator("get_resources")
for page in get_resources_paginator.paginate():
resources_count += len(page["ResourceTagMappingList"])
for resource in page["ResourceTagMappingList"]:
# Avoid adding S3 buckets again:
if resource["ResourceARN"].split(":")[2] != "s3":
# Check if region is not in ARN --> Global service
if not resource["ResourceARN"].split(":")[3]:
global_resources.append(
{
"arn": resource["ResourceARN"],
"tags": resource["Tags"],
}
)
else:
resources_in_region.append(
{
"arn": resource["ResourceARN"],
"tags": resource["Tags"],
}
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
get_resources_paginator = client.get_paginator("get_resources")
for page in get_resources_paginator.paginate():
resources_count += len(page["ResourceTagMappingList"])
for resource in page["ResourceTagMappingList"]:
# Avoid adding S3 buckets again:
if resource["ResourceARN"].split(":")[2] != "s3":
# Check if region is not in ARN --> Global service
if not resource["ResourceARN"].split(":")[3]:
global_resources.append(
{
"arn": resource["ResourceARN"],
"tags": resource["Tags"],
}
)
else:
resources_in_region.append(
{
"arn": resource["ResourceARN"],
"tags": resource["Tags"],
}
)
bar()
if len(resources_in_region) > 0:
total_resources_per_region[region] = len(resources_in_region)
@@ -119,7 +112,7 @@ def quick_inventory(audit_info: AWS_Audit_Info, args):
)
print(f"\nTotal resources found: {Fore.GREEN}{len(resources)}{Style.RESET_ALL}")
create_output(resources, audit_info, args)
create_output(resources, audit_info, output_directory)
def create_inventory_table(resources: list, resources_in_region: dict) -> dict:
@@ -160,8 +153,22 @@ def create_inventory_table(resources: list, resources_in_region: dict) -> dict:
services[service] = 0
services[service] += 1
resource_type = get_arn_resource_type(resource["arn"], service)
if service == "s3":
resource_type = "bucket"
elif service == "sns":
resource_type = "topic"
elif service == "sqs":
resource_type = "queue"
elif service == "apigateway":
split_parts = resource["arn"].split(":")[5].split("/")
if "integration" in split_parts and "responses" in split_parts:
resource_type = "restapis-resources-methods-integration-response"
elif "documentation" in split_parts and "parts" in split_parts:
resource_type = "restapis-documentation-parts"
else:
resource_type = resource["arn"].split(":")[5].split("/")[1]
else:
resource_type = resource["arn"].split(":")[5].split("/")[0]
if service not in resources_type:
resources_type[service] = {}
if resource_type not in resources_type[service]:
@@ -209,11 +216,9 @@ def create_inventory_table(resources: list, resources_in_region: dict) -> dict:
return inventory_table
def create_output(resources: list, audit_info: AWS_Audit_Info, args):
def create_output(resources: list, audit_info: AWS_Audit_Info, output_directory: str):
json_output = []
output_file = (
f"prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"
)
output_file = f"{output_directory}/prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"
for item in sorted(resources, key=lambda d: d["arn"]):
resource = {}
@@ -252,14 +257,10 @@ def create_output(resources: list, audit_info: AWS_Audit_Info, args):
json_object = json.dumps(json_output, indent=4)
# Writing to sample.json
with open(
args.output_directory + "/" + output_file + json_file_suffix, "w"
) as outfile:
with open(output_file + json_file_suffix, "w") as outfile:
outfile.write(json_object)
csv_file = open(
args.output_directory + "/" + output_file + csv_file_suffix, "w", newline=""
)
csv_file = open(output_file + csv_file_suffix, "w", newline="")
csv_writer = csv.writer(csv_file)
count = 0
@@ -271,115 +272,68 @@ def create_output(resources: list, audit_info: AWS_Audit_Info, args):
csv_writer.writerow(data.values())
csv_file.close()
print(
f"\n{Fore.YELLOW}WARNING: Only resources that have or have had tags will appear (except for IAM and S3).\nSee more in https://docs.prowler.cloud/en/latest/tutorials/quick-inventory/#objections{Style.RESET_ALL}"
)
print("\nMore details in files:")
print(f" - CSV: {args.output_directory}/{output_file+csv_file_suffix}")
print(f" - JSON: {args.output_directory}/{output_file+json_file_suffix}")
# Send output to S3 if needed (-B / -D)
for mode in ["json", "csv"]:
if args.output_bucket or args.output_bucket_no_assume:
# Check if -B was input
if args.output_bucket:
output_bucket = args.output_bucket
bucket_session = audit_info.audit_session
# Check if -D was input
elif args.output_bucket_no_assume:
output_bucket = args.output_bucket_no_assume
bucket_session = audit_info.original_session
send_to_s3_bucket(
output_file,
args.output_directory,
mode,
output_bucket,
bucket_session,
)
print("\nMore details in files:")
print(f" - CSV: {output_file+csv_file_suffix}")
print(f" - JSON: {output_file+json_file_suffix}")
def get_regional_buckets(audit_info: AWS_Audit_Info, region: str) -> list:
regional_buckets = []
s3_client = audit_info.audit_session.client("s3", region_name=region)
try:
buckets = s3_client.list_buckets()
for bucket in buckets["Buckets"]:
bucket_region = s3_client.get_bucket_location(Bucket=bucket["Name"])[
"LocationConstraint"
]
if bucket_region == "EU": # If EU, bucket_region is eu-west-1
bucket_region = "eu-west-1"
if not bucket_region: # If None, bucket_region is us-east-1
bucket_region = "us-east-1"
if bucket_region == region:  # Only add the bucket if it is in the current region
try:
bucket_tags = s3_client.get_bucket_tagging(Bucket=bucket["Name"])[
"TagSet"
]
except ClientError as error:
bucket_tags = []
if error.response["Error"]["Code"] != "NoSuchTagSet":
logger.error(
f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
bucket_arn = (
f"arn:{audit_info.audited_partition}:s3:{region}::{bucket['Name']}"
)
regional_buckets.append({"arn": bucket_arn, "tags": bucket_tags})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
buckets = s3_client.list_buckets()
for bucket in buckets["Buckets"]:
bucket_region = s3_client.get_bucket_location(Bucket=bucket["Name"])[
"LocationConstraint"
]
if bucket_region == "EU": # If EU, bucket_region is eu-west-1
bucket_region = "eu-west-1"
if not bucket_region: # If None, bucket_region is us-east-1
bucket_region = "us-east-1"
if bucket_region == region:  # Only add the bucket if it is in the current region
try:
bucket_tags = s3_client.get_bucket_tagging(Bucket=bucket["Name"])[
"TagSet"
]
except ClientError as error:
bucket_tags = []
if error.response["Error"]["Code"] != "NoSuchTagSet":
logger.error(
f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
bucket_arn = (
f"arn:{audit_info.audited_partition}:s3:{region}::{bucket['Name']}"
)
regional_buckets.append({"arn": bucket_arn, "tags": bucket_tags})
return regional_buckets
def get_iam_resources(session) -> list:
iam_resources = []
iam_client = session.client("iam")
try:
get_roles_paginator = iam_client.get_paginator("list_roles")
for page in get_roles_paginator.paginate():
for role in page["Roles"]:
# Avoid aws-service-role roles
if "aws-service-role" not in role["Arn"]:
iam_resources.append({"arn": role["Arn"], "tags": role.get("Tags")})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
try:
get_users_paginator = iam_client.get_paginator("list_users")
for page in get_users_paginator.paginate():
for user in page["Users"]:
iam_resources.append({"arn": user["Arn"], "tags": user.get("Tags")})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
try:
get_groups_paginator = iam_client.get_paginator("list_groups")
for page in get_groups_paginator.paginate():
for group in page["Groups"]:
iam_resources.append({"arn": group["Arn"], "tags": []})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
try:
get_policies_paginator = iam_client.get_paginator("list_policies")
for page in get_policies_paginator.paginate(Scope="Local"):
for policy in page["Policies"]:
iam_resources.append({"arn": policy["Arn"], "tags": policy.get("Tags")})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
try:
for saml_provider in iam_client.list_saml_providers()["SAMLProviderList"]:
iam_resources.append({"arn": saml_provider["Arn"], "tags": []})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
get_roles_paginator = iam_client.get_paginator("list_roles")
for page in get_roles_paginator.paginate():
for role in page["Roles"]:
# Avoid aws-service-role roles
if "aws-service-role" not in role["Arn"]:
iam_resources.append({"arn": role["Arn"], "tags": role.get("Tags")})
get_users_paginator = iam_client.get_paginator("list_users")
for page in get_users_paginator.paginate():
for user in page["Users"]:
iam_resources.append({"arn": user["Arn"], "tags": user.get("Tags")})
get_groups_paginator = iam_client.get_paginator("list_groups")
for page in get_groups_paginator.paginate():
for group in page["Groups"]:
iam_resources.append({"arn": group["Arn"], "tags": []})
get_policies_paginator = iam_client.get_paginator("list_policies")
for page in get_policies_paginator.paginate(Scope="Local"):
for policy in page["Policies"]:
iam_resources.append({"arn": policy["Arn"], "tags": policy.get("Tags")})
for saml_provider in iam_client.list_saml_providers()["SAMLProviderList"]:
iam_resources.append({"arn": saml_provider["Arn"], "tags": []})
return iam_resources
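get_iam_resources relies on boto3 paginators for every list call except list_saml_providers, which boto3 does not paginate. A standalone sketch of the paginator pattern used above, mirroring the aws-service-role exclusion (the function name is illustrative):

import boto3

def list_customer_roles(session: boto3.session.Session) -> list:
    """Collect role ARNs, skipping AWS service-linked roles, via a paginator."""
    iam = session.client("iam")
    roles = []
    for page in iam.get_paginator("list_roles").paginate():
        for role in page["Roles"]:
            if "aws-service-role" not in role["Arn"]:  # skip service-linked roles
                roles.append(role["Arn"])
    return roles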

View File

@@ -1,7 +1,6 @@
import threading
from typing import Optional
from botocore.exceptions import ClientError
from pydantic import BaseModel
from prowler.lib.logger import logger
@@ -80,21 +79,10 @@ class AccessAnalyzer:
if analyzer.status == "ACTIVE":
regional_client = self.regional_clients[analyzer.region]
for finding in analyzer.findings:
try:
finding_information = regional_client.get_finding(
analyzerArn=analyzer.arn, id=finding.id
)
finding.status = finding_information["finding"]["status"]
except ClientError as error:
if (
error.response["Error"]["Code"]
== "ResourceNotFoundException"
):
logger.warning(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
finding.status = ""
continue
finding_information = regional_client.get_finding(
analyzerArn=analyzer.arn, id=finding.id
)
finding.status = finding_information["finding"]["status"]
except Exception as error:
logger.error(
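For context, the lines removed in this hunk guarded get_finding against findings that disappear between listing and retrieval. A minimal sketch of that defensive pattern, assuming an Access Analyzer client (the helper name is illustrative):

from botocore.exceptions import ClientError

def get_finding_status(client, analyzer_arn: str, finding_id: str) -> str:
    """Return the finding status, or "" if the finding no longer exists."""
    try:
        finding = client.get_finding(analyzerArn=analyzer_arn, id=finding_id)
        return finding["finding"]["status"]
    except ClientError as error:
        if error.response["Error"]["Code"] == "ResourceNotFoundException":
            return ""  # the finding was deleted between list and get
        raise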

View File

@@ -9,7 +9,6 @@ class account_maintain_current_contact_details(Check):
report = Check_Report_AWS(self.metadata())
report.region = account_client.region
report.resource_id = account_client.audited_account
report.resource_arn = account_client.audited_account_arn
report.status = "INFO"
report.status_extended = "Manual check: Login to the AWS Console. Choose your account name on the top right of the window -> My Account -> Contact Information."
return [report]

View File

@@ -9,7 +9,6 @@ class account_security_contact_information_is_registered(Check):
report = Check_Report_AWS(self.metadata())
report.region = account_client.region
report.resource_id = account_client.audited_account
report.resource_arn = account_client.audited_account_arn
report.status = "INFO"
report.status_extended = "Manual check: Login to the AWS Console. Choose your account name on the top right of the window -> My Account -> Alternate Contacts -> Security Section."
return [report]

View File

@@ -9,7 +9,6 @@ class account_security_questions_are_registered_in_the_aws_account(Check):
report = Check_Report_AWS(self.metadata())
report.region = account_client.region
report.resource_id = account_client.audited_account
report.resource_arn = account_client.audited_account_arn
report.status = "INFO"
report.status_extended = "Manual check: Login to the AWS Console as root. Choose your account name on the top right of the window -> My Account -> Configure Security Challenge Questions."
return [report]

View File

@@ -1,19 +1,12 @@
from prowler.providers.aws.aws_provider import (
generate_regional_clients,
get_default_region,
)
################## Account
class Account:
def __init__(self, audit_info):
self.service = "account"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audited_partition = audit_info.audited_partition
self.audited_account_arn = audit_info.audited_account_arn
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.region = get_default_region(self.service, audit_info)
self.region = audit_info.profile_region
def __get_session__(self):
return self.session
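This hunk swaps the get_default_region helper for audit_info.profile_region. Purely as an illustration of the idea, not Prowler's actual implementation, a default-region helper might fall back to a partition-appropriate region when the profile does not set one:

def default_region_sketch(profile_region: str, partition: str) -> str:
    """Hypothetical fallback: prefer the profile region, else a partition default."""
    partition_defaults = {
        "aws": "us-east-1",
        "aws-cn": "cn-north-1",
        "aws-us-gov": "us-gov-west-1",
    }
    return profile_region or partition_defaults.get(partition, "us-east-1")

# Usage sketch:
# region = default_region_sketch(audit_info.profile_region, audit_info.audited_partition)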

View File

@@ -41,7 +41,7 @@ class APIGateway:
get_rest_apis_paginator = regional_client.get_paginator("get_rest_apis")
for page in get_rest_apis_paginator.paginate():
for apigw in page["items"]:
arn = f"arn:{self.audited_partition}:apigateway:{regional_client.region}::/restapis/{apigw['id']}"
arn = f"arn:{self.audited_partition}:apigateway:{regional_client.region}::/apis/{apigw['id']}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
@@ -100,7 +100,7 @@ class APIGateway:
logging = True
if "clientCertificateId" in stage:
client_certificate = True
arn = f"arn:{self.audited_partition}:apigateway:{regional_client.region}::/restapis/{rest_api.id}/stages/{stage['stageName']}"
arn = f"arn:{self.audited_partition}:apigateway:{regional_client.region}::/apis/{rest_api.id}/stages/{stage['stageName']}"
rest_api.stages.append(
Stage(
name=stage["stageName"],
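Both hunks above change the path segment in API Gateway ARNs between /restapis/ and /apis/. REST (v1) APIs use the /restapis/ segment while v2 HTTP and WebSocket APIs use /apis/; a tiny formatter makes the difference explicit (the helper name is illustrative):

def apigateway_arn(partition: str, region: str, api_id: str, v2: bool = False) -> str:
    """Build an API Gateway ARN; v1 REST APIs use /restapis/, v2 APIs use /apis/."""
    segment = "apis" if v2 else "restapis"
    return f"arn:{partition}:apigateway:{region}::/{segment}/{api_id}"

# apigateway_arn("aws", "eu-west-1", "a1b2c3")        -> ...::/restapis/a1b2c3
# apigateway_arn("aws", "eu-west-1", "a1b2c3", True)  -> ...::/apis/a1b2c3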

View File

@@ -21,8 +21,8 @@
"Terraform": "https://docs.bridgecrew.io/docs/bc_aws_logging_30#cloudformation"
},
"Recommendation": {
"Text": "Monitoring is an important part of maintaining the reliability, availability and performance of API Gateway and your AWS solutions. You should collect monitoring data from all of the parts of your AWS solution. CloudTrail provides a record of actions taken by a user, role, or an AWS service in API Gateway. Using the information collected by CloudTrail, you can determine the request that was made to API Gateway, the IP address from which the request was made, who made the request, etc.",
"Url": "https://docs.aws.amazon.com/apigateway/latest/developerguide/security-monitoring.html"
"Text": "Implement Amazon Cognito or a Lambda function to control access to your API.",
"Url": "https://docs.aws.amazon.com/apigatewayv2/latest/api-reference/apis-apiid-authorizers.html"
}
},
"Categories": [],

View File

@@ -1,7 +1,7 @@
{
"Provider": "aws",
"CheckID": "apigatewayv2_authorizers_enabled",
"CheckTitle": "Checks if API Gateway V2 has configured authorizers.",
"CheckTitle": "Checks if API Gateway V2 has Access Logging enabled.",
"CheckType": [
"Logging and Monitoring"
],
@@ -10,8 +10,8 @@
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"ResourceType": "AwsApiGatewayV2Api",
"Description": "Checks if API Gateway V2 has configured authorizers.",
"Risk": "If no authorizer is enabled anyone can use the service.",
"Description": "Checks if API Gateway V2 has Access Logging enabled.",
"Risk": "If not enabled the logging of API calls is not possible. This information is important for monitoring API access.",
"RelatedUrl": "",
"Remediation": {
"Code": {
@@ -21,8 +21,8 @@
"Terraform": ""
},
"Recommendation": {
"Text": "Implement Amazon Cognito or a Lambda function to control access to your API",
"Url": "https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-use-lambda-authorizer.html"
"Text": "Enable Access Logging in the API stage.",
"Url": "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apigatewayv2-stage-accesslogsettings.html"
}
},
"Categories": [],

View File

@@ -10,18 +10,18 @@ class apigatewayv2_authorizers_enabled(Check):
for api in apigatewayv2_client.apis:
report = Check_Report_AWS(self.metadata())
report.region = api.region
report.resource_id = api.name
report.resource_arn = api.arn
report.resource_tags = api.tags
report.status = "FAIL"
report.status_extended = (
f"API Gateway V2 {api.name} ID {api.id} has not authorizer configured."
)
if api.authorizer:
report.status = "PASS"
report.status_extended = (
f"API Gateway V2 {api.name} ID {api.id} has authorizer configured."
)
report.resource_id = api.name
report.resource_tags = api.tags
else:
report.status = "FAIL"
report.status_extended = f"API Gateway V2 {api.name} ID {api.id} has not authorizer configured."
report.resource_id = api.name
report.resource_tags = api.tags
findings.append(report)
return findings
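The first variant shown in this hunk sets the shared report fields and a FAIL default once, then overrides to PASS when an authorizer exists, instead of duplicating assignments in both branches. A condensed sketch of that default-then-override pattern, with the report reduced to a plain object for illustration:

from types import SimpleNamespace

def build_report(api) -> SimpleNamespace:
    """Default-then-override: set FAIL up front, flip to PASS when compliant."""
    report = SimpleNamespace(resource_id=api.name, resource_arn=api.arn)
    report.status = "FAIL"
    report.status_extended = f"API Gateway V2 {api.name} has no authorizer configured."
    if api.authorizer:
        report.status = "PASS"
        report.status_extended = f"API Gateway V2 {api.name} has an authorizer configured."
    return report

# build_report(SimpleNamespace(name="shop", arn="arn:...", authorizer=True)).status -> "PASS"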

View File

@@ -14,7 +14,6 @@ class ApiGatewayV2:
self.service = "apigatewayv2"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audited_partition = audit_info.audited_partition
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.apis = []
@@ -37,16 +36,14 @@ class ApiGatewayV2:
def __get_apis__(self, regional_client):
logger.info("APIGatewayv2 - Getting APIs...")
try:
get_apis_paginator = regional_client.get_paginator("get_apis")
for page in get_apis_paginator.paginate():
get_rest_apis_paginator = regional_client.get_paginator("get_apis")
for page in get_rest_apis_paginator.paginate():
for apigw in page["Items"]:
arn = f"arn:{self.audited_partition}:apigateway:{regional_client.region}::apis/{apigw['ApiId']}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
is_resource_filtered(apigw["ApiId"], self.audit_resources)
):
self.apis.append(
API(
arn=arn,
id=apigw["ApiId"],
region=regional_client.region,
name=apigw["Name"],
@@ -101,7 +98,6 @@ class Stage(BaseModel):
class API(BaseModel):
arn: str
id: str
region: str
name: str
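This hunk replaces ARN-based resource filtering with filtering on the bare ApiId. As a rough stand-in for is_resource_filtered (whose real logic lives elsewhere in Prowler; this simplified version is an assumption), matching on the full ARN preserves region and service context that the raw id loses:

def is_resource_filtered_sketch(resource: str, audit_resources: list) -> bool:
    """Simplified stand-in: match when any requested pattern appears in the resource."""
    return any(pattern in resource for pattern in audit_resources)

# Matching on the full ARN keeps region context; the bare id loses it:
# is_resource_filtered_sketch("arn:aws:apigateway:eu-west-1::apis/abc123", ["eu-west-1"]) -> True
# is_resource_filtered_sketch("abc123", ["eu-west-1"]) -> False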

View File

@@ -7,7 +7,7 @@
],
"ServiceName": "autoscaling",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:autoscaling:region:account-id:autoScalingGroupName/resource-name",
"ResourceIdTemplate": "arn:partition:access-analyzer:region:account-id:analyzer/resource-id",
"Severity": "critical",
"ResourceType": "Other",
"Description": "Find secrets in EC2 Auto Scaling Launch Configuration",

View File

@@ -1,30 +0,0 @@
{
"Provider": "aws",
"CheckID": "autoscaling_group_multiple_az",
"CheckTitle": "EC2 Auto Scaling Group should use multiple Availability Zones",
"CheckType": [],
"ServiceName": "autoscaling",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:autoscaling:region:account-id:autoScalingGroupName/resource-name",
"Severity": "medium",
"ResourceType": "Other",
"Description": "EC2 Auto Scaling Group should use multiple Availability Zones",
"Risk": "In case of a failure in a single Availability Zone, the Auto Scaling Group will not be able to launch new instances to replace the failed ones.",
"RelatedUrl": "https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-add-availability-zone.html",
"Remediation": {
"Code": {
"CLI": "aws autoscaling update-auto-scaling-group",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/AutoScaling/multiple-availability-zones.html",
"Terraform": ""
},
"Recommendation": {
"Text": "Configure multiple Availability Zones for EC2 Auto Scaling Group",
"Url": "https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-add-availability-zone.html"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,28 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.autoscaling.autoscaling_client import (
autoscaling_client,
)
class autoscaling_group_multiple_az(Check):
def execute(self):
findings = []
for group in autoscaling_client.groups:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "FAIL"
report.status_extended = (
f"Autoscaling group {group.name} has only one availability zones."
)
if len(group.availability_zones) > 1:
report.status = "PASS"
report.status_extended = (
f"Autoscaling group {group.name} has multiple availability zones."
)
findings.append(report)
return findings
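The deleted check flags Auto Scaling groups confined to a single Availability Zone. A standalone sketch of the same evaluation straight against boto3 output (the function name is illustrative):

import boto3

def groups_in_single_az(session: boto3.session.Session, region: str) -> list:
    """Return the names of Auto Scaling groups spanning only one Availability Zone."""
    client = session.client("autoscaling", region_name=region)
    flagged = []
    for page in client.get_paginator("describe_auto_scaling_groups").paginate():
        for group in page["AutoScalingGroups"]:
            if len(group["AvailabilityZones"]) <= 1:
                flagged.append(group["AutoScalingGroupName"])
    return flagged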

View File

@@ -17,8 +17,6 @@ class AutoScaling:
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.launch_configurations = []
self.__threading_call__(self.__describe_launch_configurations__)
self.groups = []
self.__threading_call__(self.__describe_auto_scaling_groups__)
def __get_session__(self):
return self.session
@@ -61,35 +59,6 @@ class AutoScaling:
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_auto_scaling_groups__(self, regional_client):
logger.info("AutoScaling - Describing AutoScaling Groups...")
try:
describe_auto_scaling_groups_paginator = regional_client.get_paginator(
"describe_auto_scaling_groups"
)
for page in describe_auto_scaling_groups_paginator.paginate():
for group in page["AutoScalingGroups"]:
if not self.audit_resources or (
is_resource_filtered(
group["AutoScalingGroupARN"],
self.audit_resources,
)
):
self.groups.append(
Group(
arn=group.get("AutoScalingGroupARN"),
name=group.get("AutoScalingGroupName"),
region=regional_client.region,
availability_zones=group.get("AvailabilityZones"),
tags=group.get("Tags"),
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
class LaunchConfiguration(BaseModel):
arn: str
@@ -97,11 +66,3 @@ class LaunchConfiguration(BaseModel):
user_data: str
image_id: str
region: str
class Group(BaseModel):
arn: str
name: str
region: str
availability_zones: list
tags: list = []
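The removed Group model is a plain pydantic BaseModel with a defaulted tags list; a minimal working equivalent is below. Unlike a bare Python mutable default, pydantic copies the default list per instance, so tags: list = [] is safe here.

from pydantic import BaseModel

class Group(BaseModel):
    arn: str
    name: str
    region: str
    availability_zones: list
    tags: list = []

# Usage sketch:
# Group(arn="arn:aws:autoscaling:...", name="asg-1", region="eu-west-1",
#       availability_zones=["eu-west-1a", "eu-west-1b"]).tags -> []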

View File

@@ -62,19 +62,18 @@ class Lambda:
):
lambda_name = function["FunctionName"]
lambda_arn = function["FunctionArn"]
# We must use the Lambda ARN as the dict key since we could have Lambdas in different regions with the same name
self.functions[lambda_arn] = Function(
self.functions[lambda_name] = Function(
name=lambda_name,
arn=lambda_arn,
region=regional_client.region,
)
if "Runtime" in function:
self.functions[lambda_arn].runtime = function["Runtime"]
self.functions[lambda_name].runtime = function["Runtime"]
if "Environment" in function:
lambda_environment = function["Environment"].get(
"Variables"
)
self.functions[lambda_arn].environment = lambda_environment
self.functions[lambda_name].environment = lambda_environment
except Exception as error:
logger.error(
@@ -94,7 +93,7 @@ class Lambda:
if "Location" in function_information["Code"]:
code_location_uri = function_information["Code"]["Location"]
raw_code_zip = requests.get(code_location_uri).content
self.functions[function.arn].code = LambdaCode(
self.functions[function.name].code = LambdaCode(
location=code_location_uri,
code_zip=zipfile.ZipFile(io.BytesIO(raw_code_zip)),
)
@@ -115,12 +114,12 @@ class Lambda:
function_policy = regional_client.get_policy(
FunctionName=function.name
)
self.functions[function.arn].policy = json.loads(
self.functions[function.name].policy = json.loads(
function_policy["Policy"]
)
except ClientError as e:
if e.response["Error"]["Code"] == "ResourceNotFoundException":
self.functions[function.arn].policy = {}
self.functions[function.name].policy = {}
except Exception as error:
logger.error(
@@ -142,14 +141,14 @@ class Lambda:
allow_origins = function_url_config["Cors"]["AllowOrigins"]
else:
allow_origins = []
self.functions[function.arn].url_config = URLConfig(
self.functions[function.name].url_config = URLConfig(
auth_type=function_url_config["AuthType"],
url=function_url_config["FunctionUrl"],
cors_config=URLConfigCORS(allow_origins=allow_origins),
)
except ClientError as e:
if e.response["Error"]["Code"] == "ResourceNotFoundException":
self.functions[function.arn].url_config = None
self.functions[function.name].url_config = None
except Exception as error:
logger.error(
@@ -162,14 +161,9 @@ class Lambda:
logger.info("Lambda - List Tags...")
try:
for function in self.functions.values():
try:
regional_client = self.regional_clients[function.region]
response = regional_client.list_tags(Resource=function.arn)["Tags"]
function.tags = [response]
except ClientError as e:
if e.response["Error"]["Code"] == "ResourceNotFoundException":
function.tags = []
regional_client = self.regional_clients[function.region]
response = regional_client.list_tags(Resource=function.arn)["Tags"]
function.tags = [response]
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

Some files were not shown because too many files have changed in this diff.