Compare commits

1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | bffe2a2c63 |  |
.github/CODEOWNERS (vendored): 2 changes

@@ -1 +1 @@
* @prowler-cloud/prowler-oss
* @prowler-cloud/prowler-team
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file): 52 additions

@@ -0,0 +1,52 @@
---
name: Bug report
about: Create a report to help us improve
title: "[Bug]: "
labels: bug, status/needs-triage
assignees: ''

---

<!--
Please use this template to create your bug report. By providing as much info as possible you help us understand the issue, reproduce it and resolve it for you quicker. Therefore, take a couple of extra minutes to make sure you have provided all info needed.

PROTIP: record your screen and attach it as a gif to showcase the issue.

- How to record and attach gif: https://bit.ly/2Mi8T6K
-->

**What happened?**
A clear and concise description of what the bug is or what is not working as expected.


**How to reproduce it**
Steps to reproduce the behavior:
1. What command are you running?
2. Cloud provider you are launching
3. Environment you have, like single account, multi-account, organizations, multi or single subscription, etc.
4. See error


**Expected behavior**
A clear and concise description of what you expected to happen.


**Screenshots or Logs**
If applicable, add screenshots to help explain your problem.
You can also add logs (anonymize them first!). This command may help to capture one:
`prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log`, then attach the log file here.


**From where are you running Prowler?**
Please complete the following information:
- Resource: (e.g. EC2 instance, Fargate task, Docker container manually, EKS, Cloud9, CodeBuild, workstation, etc.)
- OS: [e.g. Amazon Linux 2, Mac, Alpine, Windows, etc.]
- Prowler Version [`prowler --version`]:
- Python version [`python --version`]:
- Pip version [`pip --version`]:
- Installation method (Are you running it from the pip package or cloning the GitHub repo?):
- Others:


**Additional context**
Add any other context about the problem here.
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, file removed): 97 deletions

@@ -1,97 +0,0 @@
name: 🐞 Bug Report
description: Create a report to help us improve
title: "[Bug]: "
labels: ["bug", "status/needs-triage"]

body:
  - type: textarea
    id: reproduce
    attributes:
      label: Steps to Reproduce
      description: Steps to reproduce the behavior
      placeholder: |-
        1. What command are you running?
        2. Cloud provider you are launching
        3. Environment you have, like single account, multi-account, organizations, multi or single subscription, etc.
        4. See error
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected behavior
      description: A clear and concise description of what you expected to happen.
    validations:
      required: true
  - type: textarea
    id: actual
    attributes:
      label: Actual Result with Screenshots or Logs
      description: If applicable, add screenshots to help explain your problem. Also, you can add logs (anonymize them first!). Here a command that may help to share a log `prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log` then attach here the log file.
    validations:
      required: true
  - type: dropdown
    id: type
    attributes:
      label: How did you install Prowler?
      options:
        - Cloning the repository from github.com (git clone)
        - From pip package (pip install prowler)
        - From brew (brew install prowler)
        - Docker (docker pull toniblyx/prowler)
    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Environment Resource
      description: From where are you running Prowler?
      placeholder: |-
        1. EC2 instance
        2. Fargate task
        3. Docker container locally
        4. EKS
        5. Cloud9
        6. CodeBuild
        7. Workstation
        8. Other (please specify)
    validations:
      required: true
  - type: textarea
    id: os
    attributes:
      label: OS used
      description: Which OS are you using?
      placeholder: |-
        1. Amazon Linux 2
        2. MacOS
        3. Alpine Linux
        4. Windows
        5. Other (please specify)
    validations:
      required: true
  - type: input
    id: prowler-version
    attributes:
      label: Prowler version
      description: Which Prowler version are you using?
      placeholder: |-
        prowler --version
    validations:
      required: true
  - type: input
    id: pip-version
    attributes:
      label: Pip version
      description: Which pip version are you using?
      placeholder: |-
        pip --version
    validations:
      required: true
  - type: textarea
    id: additional
    attributes:
      description: Additional context
      label: Context
    validations:
      required: false
.github/ISSUE_TEMPLATE/feature-request.yml (vendored, file removed): 36 deletions

@@ -1,36 +0,0 @@
name: 💡 Feature Request
description: Suggest an idea for this project
labels: ["enhancement", "status/needs-triage"]


body:
  - type: textarea
    id: Problem
    attributes:
      label: New feature motivation
      description: Is your feature request related to a problem? Please describe
      placeholder: |-
        1. A clear and concise description of what the problem is. Ex. I'm always frustrated when
    validations:
      required: true
  - type: textarea
    id: Solution
    attributes:
      label: Solution Proposed
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    id: Alternatives
    attributes:
      label: Describe alternatives you've considered
      description: A clear and concise description of any alternative solutions or features you've considered.
    validations:
      required: true
  - type: textarea
    id: Context
    attributes:
      label: Additional context
      description: Add any other context or screenshots about the feature request here.
    validations:
      required: false
.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file): 20 additions

@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement, status/needs-triage
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
.github/dependabot.yml (vendored): 2 changes

@@ -8,7 +8,7 @@ updates:
  - package-ecosystem: "pip" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
      interval: "daily"
    target-branch: master
    labels:
      - "dependencies"
.github/workflows/build-lint-push-containers.yml (vendored): 178 changes

@@ -15,17 +15,36 @@ on:
env:
  AWS_REGION_STG: eu-west-1
  AWS_REGION_PLATFORM: eu-west-1
  AWS_REGION: us-east-1
  AWS_REGION_PRO: us-east-1
  IMAGE_NAME: prowler
  LATEST_TAG: latest
  STABLE_TAG: stable
  TEMPORARY_TAG: temporary
  DOCKERFILE_PATH: ./Dockerfile
  PYTHON_VERSION: 3.9

jobs:
  # Lint Dockerfile using Hadolint
  # dockerfile-linter:
  #   runs-on: ubuntu-latest
  #   steps:
  #     -
  #       name: Checkout
  #       uses: actions/checkout@v3
  #     -
  #       name: Install Hadolint
  #       run: |
  #         VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
  #         grep '"tag_name":' | \
  #         sed -E 's/.*"v([^"]+)".*/\1/' \
  #         ) && curl -L -o /tmp/hadolint https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64 \
  #         && chmod +x /tmp/hadolint
  #     -
  #       name: Run Hadolint
  #       run: |
  #         /tmp/hadolint util/Dockerfile

  # Build Prowler OSS container
  container-build-push:
  container-build:
    # needs: dockerfile-linter
    runs-on: ubuntu-latest
    env:

@@ -33,30 +52,87 @@ jobs:
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Setup python (release)
      - name: setup python (release)
        if: github.event_name == 'release'
        uses: actions/setup-python@v2
        with:
          python-version: ${{ env.PYTHON_VERSION }}

          python-version: 3.9 #install the python needed
      - name: Install dependencies (release)
        if: github.event_name == 'release'
        run: |
          pipx install poetry
          pipx inject poetry poetry-bumpversion

      - name: Update Prowler version (release)
        if: github.event_name == 'release'
        run: |
          poetry version ${{ github.event.release.tag_name }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build
        uses: docker/build-push-action@v2
        with:
          # Without pushing to registries
          push: false
          tags: ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }}
          file: ${{ env.DOCKERFILE_PATH }}
          outputs: type=docker,dest=/tmp/${{ env.IMAGE_NAME }}.tar
      - name: Share image between jobs
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.IMAGE_NAME }}.tar
          path: /tmp/${{ env.IMAGE_NAME }}.tar

  # Lint Prowler OSS container using Dockle
  # container-linter:
  #   needs: container-build
  #   runs-on: ubuntu-latest
  #   steps:
  #     -
  #       name: Get container image from shared
  #       uses: actions/download-artifact@v2
  #       with:
  #         name: ${{ env.IMAGE_NAME }}.tar
  #         path: /tmp
  #     -
  #       name: Load Docker image
  #       run: |
  #         docker load --input /tmp/${{ env.IMAGE_NAME }}.tar
  #         docker image ls -a
  #     -
  #       name: Install Dockle
  #       run: |
  #         VERSION=$(curl --silent "https://api.github.com/repos/goodwithtech/dockle/releases/latest" | \
  #         grep '"tag_name":' | \
  #         sed -E 's/.*"v([^"]+)".*/\1/' \
  #         ) && curl -L -o dockle.deb https://github.com/goodwithtech/dockle/releases/download/v${VERSION}/dockle_${VERSION}_Linux-64bit.deb \
  #         && sudo dpkg -i dockle.deb && rm dockle.deb
  #     -
  #       name: Run Dockle
  #       run: dockle ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }}

  # Push Prowler OSS container to registries
  container-push:
    # needs: container-linter
    needs: container-build
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read # This is required for actions/checkout
    steps:
      - name: Get container image from shared
        uses: actions/download-artifact@v2
        with:
          name: ${{ env.IMAGE_NAME }}.tar
          path: /tmp
      - name: Load Docker image
        run: |
          docker load --input /tmp/${{ env.IMAGE_NAME }}.tar
          docker image ls -a
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Public ECR
        uses: docker/login-action@v2
        with:

@@ -64,53 +140,55 @@ jobs:
          username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
          password: ${{ secrets.PUBLIC_ECR_AWS_SECRET_ACCESS_KEY }}
        env:
          AWS_REGION: ${{ env.AWS_REGION }}
          AWS_REGION: ${{ env.AWS_REGION_PRO }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Build and push container image (latest)
        if: github.event_name == 'push'
        uses: docker/build-push-action@v2
        with:
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
          file: ${{ env.DOCKERFILE_PATH }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
        uses: docker/build-push-action@v2
        with:
          # Use local context to get changes
          # https://github.com/docker/build-push-action#path-context
          context: .
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
            ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
          file: ${{ env.DOCKERFILE_PATH }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  dispatch-action:
    needs: container-build-push
    runs-on: ubuntu-latest
    steps:
      - name: Get latest commit info
      - name: Tag (latest)
        if: github.event_name == 'push'
        run: |
          LATEST_COMMIT_HASH=$(echo ${{ github.event.after }} | cut -b -7)
          echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV
          docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
          docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}

      - # Push to master branch - push "latest" tag
        name: Push (latest)
        if: github.event_name == 'push'
        run: |
          docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
          docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}

      - # Tag the new release (stable and release tag)
        name: Tag (release)
        if: github.event_name == 'release'
        run: |
          docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
          docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}

          docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
          docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}

      - # Push the new release (stable and release tag)
        name: Push (release)
        if: github.event_name == 'release'
        run: |
          docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
          docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}

          docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
          docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}

      - name: Delete artifacts
        if: always()
        uses: geekyeggo/delete-artifact@v1
        with:
          name: ${{ env.IMAGE_NAME }}.tar

  dispatch-action:
    needs: container-push
    runs-on: ubuntu-latest
    steps:
      - name: Dispatch event for latest
        if: github.event_name == 'push'
        run: |
          curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" -H "X-GitHub-Api-Version: 2022-11-28" --data '{"event_type":"dispatch","client_payload":{"version":"latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
          curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" -H "X-GitHub-Api-Version: 2022-11-28" --data '{"event_type":"dispatch","client_payload":{"version":"latest"}}'
      - name: Dispatch event for release
        if: github.event_name == 'release'
        run: |
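For reference, a well-formed `repository_dispatch` call matching the step above could look like the following sketch. The owner, repo and token values are placeholders standing in for the `DISPATCH_OWNER`, `DISPATCH_REPO` and `ACCESS_TOKEN` secrets, and the payload mirrors the workflow's `client_payload`:

```
# Hypothetical stand-ins for the workflow secrets
OWNER="example-owner"
REPO="example-repo"
TOKEN="ghp_exampletoken"

# Fire the dispatch event with a valid JSON payload
curl "https://api.github.com/repos/${OWNER}/${REPO}/dispatches" \
  -H "Accept: application/vnd.github+json" \
  -H "Authorization: Bearer ${TOKEN}" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  --data '{"event_type":"dispatch","client_payload":{"version":"latest","tag":"abc1234"}}'
```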
.github/workflows/pull-request.yml (vendored): 10 changes

@@ -17,17 +17,14 @@ jobs:
    steps:
      - uses: actions/checkout@v3
      - name: Install poetry
        run: |
          python -m pip install --upgrade pip
          pipx install poetry
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'poetry'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install poetry
          poetry install
          poetry run pip list
          VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \

@@ -35,9 +32,6 @@ jobs:
          sed -E 's/.*"v([^"]+)".*/\1/' \
          ) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
          && chmod +x /tmp/hadolint
      - name: Poetry check
        run: |
          poetry lock --check
      - name: Lint with flake8
        run: |
          poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
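Contributors can reproduce this PR gate locally before pushing; the following sketch runs the same checks with the flags taken verbatim from the steps above:

```
# Install the tooling the workflow uses
pipx install poetry
poetry install

# Fails if pyproject.toml and poetry.lock are out of sync
poetry lock --check

# Same lint invocation as the "Lint with flake8" step
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
```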
.github/workflows/pypi-release.yml (vendored): 44 changes

@@ -6,6 +6,7 @@ on:
env:
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  GITHUB_BRANCH: master

jobs:
  release-prowler-job:

@@ -16,16 +17,16 @@ jobs:
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v3
        with:
          ref: ${{ env.GITHUB_BRANCH }}
      - name: setup python
        uses: actions/setup-python@v2
        with:
          python-version: 3.9 #install the python needed
      - name: Install dependencies
        run: |
          pipx install poetry
          pipx inject poetry poetry-bumpversion
      - name: setup python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
          cache: 'poetry'
      - name: Change version and Build package
        run: |
          poetry version ${{ env.RELEASE_TAG }}

@@ -35,19 +36,26 @@ jobs:
          git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify
          git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}"
          git push -f origin ${{ env.RELEASE_TAG }}
          git checkout -B release-${{ env.RELEASE_TAG }}
          poetry build
      - name: Publish prowler package to PyPI
        run: |
          poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
          poetry publish
      - name: Replicate PyPi Package
        run: |
          rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
          python util/replicate_pypi_package.py
          poetry build
      - name: Publish prowler-cloud package to PyPI
        run: |
          poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
          poetry publish
      # Create pull request with new version
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
          base: master
          branch: release-${{ env.RELEASE_TAG }}
          labels: "status/waiting-for-revision, severity/low"
          title: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}"

@@ -59,21 +67,3 @@ jobs:
          ### License

          By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
      - name: Replicate PyPi Package
        run: |
          rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
          pip install toml
          python util/replicate_pypi_package.py
          poetry build
      - name: Publish prowler-cloud package to PyPI
        run: |
          poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
          poetry publish
      # Create pull request to github.com/Homebrew/homebrew-core to update prowler formula
      - name: Bump Homebrew formula
        uses: mislav/bump-homebrew-formula-action@v2
        with:
          formula-name: prowler
          base-branch: release-${{ env.RELEASE_TAG }}
        env:
          COMMITTER_TOKEN: ${{ secrets.PROWLER_ACCESS_TOKEN }}
@@ -19,7 +19,6 @@ repos:
    hooks:
      - id: pretty-format-toml
        args: [--autofix]
        files: pyproject.toml

  ## BASH
  - repo: https://github.com/koalaman/shellcheck-precommit

@@ -56,12 +55,10 @@ repos:
        exclude: contrib
        args: ["--ignore=E266,W503,E203,E501,W605"]

  - repo: https://github.com/python-poetry/poetry
    rev: 1.4.0 # add version here
  - repo: https://github.com/haizaar/check-pipfile-lock
    rev: v0.0.5
    hooks:
      - id: poetry-check
      - id: poetry-lock
        args: ["--no-update"]
      - id: check-pipfile-lock

  - repo: https://github.com/hadolint/hadolint
    rev: v2.12.1-beta

@@ -76,15 +73,6 @@ repos:
        entry: bash -c 'pylint --disable=W,C,R,E -j 0 -rn -sn prowler/'
        language: system

      - id: trufflehog
        name: TruffleHog
        description: Detect secrets in your data.
        # entry: bash -c 'trufflehog git file://. --only-verified --fail'
        # For running trufflehog in docker, use the following entry instead:
        entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
        language: system
        stages: ["commit", "push"]

      - id: pytest-check
        name: pytest-check
        entry: bash -c 'pytest tests -n auto'
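As a usage note, once these hooks are installed they can also be run on demand rather than waiting for a commit; a quick sketch:

```
# Run every configured hook against the whole tree
pre-commit run --all-files

# Or run a single hook by its id, e.g. the pytest gate defined above
pre-commit run pytest-check --all-files
```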
README.md: 55 changes

@@ -11,14 +11,14 @@
</p>
<p align="center">
  <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
  <a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
  <a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
  <a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
  <a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg?label=prowler-cloud%20downloads"></a>
  <a href="https://pypi.org/project/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
  <a href="https://pypi.python.org/pypi/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
  <a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg"></a>
  <a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg"></a>
  <a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
  <a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
  <a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
  <a href="https://gallery.ecr.aws/prowler-cloud/prowler"><img width="120" height="19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
  <a href="https://gallery.ecr.aws/o4g1s5r6/prowler"><img width="120" height="19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
</p>
<p align="center">
  <a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>

@@ -33,17 +33,12 @@
# Description

`Prowler` is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
`Prowler` is an Open Source security tool to perform AWS and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.

It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.

# 📖 Documentation

The full documentation can now be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)

## Looking for Prowler v2 documentation?
For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.

# ⚙️ Install

## Pip package

@@ -53,7 +48,6 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
pip install prowler
prowler -v
```
More details at https://docs.prowler.cloud

## Containers

@@ -66,7 +60,7 @@ The available versions of Prowler are the following:
The container images are available here:

- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
- [AWS Public ECR](https://gallery.ecr.aws/o4g1s5r6/prowler)

## From Github

@@ -80,11 +74,16 @@ poetry install
python prowler.py -v
```

# 📖 Documentation

The full documentation can now be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)

# 📐✏️ High level architecture

You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.

![Architecture](docs/img/architecture.png)
![Architecture](https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/architecture.png?raw=True)

# 📝 Requirements

@@ -163,22 +162,6 @@ Regarding the subscription scope, Prowler by default scans all the subscriptions
- `Reader`

## Google Cloud Platform

Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):

1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)

Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:

- Viewer
- Security Reviewer
- Stackdriver Account Viewer

> `prowler` will scan the project associated with the credentials.

# 💻 Basic Usage

To run prowler, you will need to specify the provider (e.g. aws or azure):

@@ -253,14 +236,12 @@ prowler azure [--sp-env-auth, --az-cli-auth, --browser-auth, --managed-identity-
```
> By default, `prowler` will scan all Azure subscriptions.

## Google Cloud Platform

Optionally, you can provide the location of an application credential JSON file with the following argument:

```console
prowler gcp --credentials-file path
```
# 🎉 New Features

- Python: we got rid of all bash and it is now all in Python.
- Faster: huge performance improvements (same account from 2.5 hours to 4 minutes).
- Developers and community: we have made it easier to contribute with new checks and new compliance frameworks. We also included unit tests.
- Multi-cloud: in addition to AWS, we have added Azure, we plan to include GCP and OCI soon, let us know if you want to contribute!

# 📃 License

@@ -79,21 +79,3 @@ Regarding the subscription scope, Prowler by default scans all the subscriptions
- `Security Reader`
- `Reader`

## Google Cloud

### GCP Authentication

Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):

1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)

Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:

- Viewer
- Security Reviewer
- Stackdriver Account Viewer

> `prowler` will scan the project associated with the credentials.
Binary image changes:

| File | Before | After |
|---|---|---|
|  | 283 KiB | 258 KiB |
|  | 631 KiB | (removed) |
|  | 320 KiB | (removed) |
| docs/img/quick-inventory.png (new file) |  | 220 KiB |
@@ -16,7 +16,7 @@ For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cl

## About Prowler

**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
**Prowler** is an Open Source security tool to perform AWS and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.

It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.

@@ -40,7 +40,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
    * `Python >= 3.9`
    * `Python pip >= 3.9`
    * AWS, GCP and/or Azure credentials
    * AWS and/or Azure credentials

    _Commands_:

@@ -54,7 +54,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
    _Requirements_:

    * Have `docker` installed: https://docs.docker.com/get-docker/.
    * AWS, GCP and/or Azure credentials
    * AWS and/or Azure credentials
    * In the command below, change `-v` to your local directory path in order to access the reports.

    _Commands_:

@@ -71,7 +71,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
    _Requirements for Ubuntu 20.04.3 LTS_:

    * AWS, GCP and/or Azure credentials
    * AWS and/or Azure credentials
    * Install python 3.9 with: `sudo apt-get install python3.9`
    * Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
    * Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`

@@ -87,28 +87,11 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
    prowler -v
    ```

=== "GitHub"

    _Requirements for Developers_:

    * AWS, GCP and/or Azure credentials
    * `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)

    _Commands_:

    ```
    git clone https://github.com/prowler-cloud/prowler
    cd prowler
    poetry shell
    poetry install
    python prowler.py -v
    ```

=== "Amazon Linux 2"

    _Requirements_:

    * AWS, GCP and/or Azure credentials
    * AWS and/or Azure credentials
    * Latest Amazon Linux 2 should come with Python 3.9 already installed, however it may need pip. Install Python pip 3.9 with: `sudo dnf install -y python3-pip`.
    * Make sure setuptools for python is already installed with: `pip3 install setuptools`

@@ -120,20 +103,6 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
    prowler -v
    ```

=== "Brew"

    _Requirements_:

    * `Brew` installed in your Mac or Linux
    * AWS, GCP and/or Azure credentials

    _Commands_:

    ``` bash
    brew install prowler
    prowler -v
    ```

=== "AWS CloudShell"

    Prowler can be easily executed in AWS CloudShell, but it has some prerequisites to be able to do so. AWS CloudShell is a container running with `Amazon Linux release 2 (Karoo)` that comes with Python 3.7; since Prowler requires Python >= 3.9, we need to first install a newer version of Python. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:

@@ -185,7 +154,7 @@ The available versions of Prowler are the following:
The container images are available here:

- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
- [AWS Public ECR](https://gallery.ecr.aws/o4g1s5r6/prowler)

## High level architecture

@@ -194,7 +163,7 @@ You can run Prowler from your workstation, an EC2 instance, Fargate or any other
![Architecture](img/architecture.png)
## Basic Usage

To run Prowler, you will need to specify the provider (e.g. aws, gcp or azure):
To run Prowler, you will need to specify the provider (e.g. aws or azure):
> If no provider is specified, AWS will be used for backward compatibility with most of the v2 options.

```console

@@ -226,7 +195,6 @@ For executing specific checks or services you can use options `-c`/`checks` or `
```console
prowler azure --checks storage_blob_public_access_level_is_disabled
prowler aws --services s3 ec2
prowler gcp --services iam compute
```

Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:

@@ -234,7 +202,6 @@ Also, checks and services can be excluded with options `-e`/`--excluded-checks`
```console
prowler aws --excluded-checks s3_bucket_public_access
prowler azure --excluded-services defender iam
prowler gcp --excluded-services kms
```

More options and execution methods that will save you time are in [Miscellaneous](tutorials/misc.md).

@@ -254,8 +221,6 @@ prowler aws --profile custom-profile -f us-east-1 eu-south-2
```
> By default, `prowler` will scan all AWS regions.

See more details about AWS Authentication in [Requirements](getting-started/requirements.md)

### Azure

With Azure you need to specify which auth method is going to be used:

@@ -274,28 +239,9 @@ prowler azure --browser-auth
prowler azure --managed-identity-auth
```

See more details about Azure Authentication in [Requirements](getting-started/requirements.md)
More details in [Requirements](getting-started/requirements.md)

Prowler by default scans all the subscriptions it is allowed to scan; if you want to scan a single subscription or several specific subscriptions, you can use the following flag (using az cli auth as an example):
```console
prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription ID 2> ... <subscription ID N>
```

### Google Cloud

Prowler will use your User Account credentials by default; you can configure it using:

- `gcloud init` to use a new account
- `gcloud config set account <account>` to use an existing account

Then, obtain your access credentials using: `gcloud auth application-default login`

Otherwise, you can generate and download Service Account keys in JSON format (refer to https://cloud.google.com/iam/docs/creating-managing-service-account-keys) and provide the location of the file with the following argument:

```console
prowler gcp --credentials-file path
```

> `prowler` will scan the GCP project associated with the credentials.

See more details about GCP Authentication in [Requirements](getting-started/requirements.md)
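Taken together, the GCP authentication steps removed above boil down to a short shell session; the key file path below is a placeholder:

```
# Use Application Default Credentials from your user account
gcloud auth application-default login
prowler gcp

# Or point Prowler at a service account key file instead
prowler gcp --credentials-file /path/to/sa-key.json
```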
@@ -7,52 +7,35 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b

## Allowlist Yaml File Syntax

### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### Account, Check and/or Region can be * to apply for all the cases
### Resources is a list that can have either Regex or Keywords:
########################### ALLOWLIST EXAMPLE ###########################
Allowlist:
  Accounts:
    "123456789012":
      Checks:
      Checks:
        "iam_user_hardware_mfa_enabled":
          Regions:
          Regions:
            - "us-east-1"
          Resources:
          Resources:
            - "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
            - "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "*" # Will ignore every EC2 check in every account and region
        "*":
          Regions:
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
            - "project=test|project=stage" # either of ('project=test' OR 'project=stage') in account 123456789012 and every region
          Resources:
            - "test" # Will ignore every resource containing the string "test" in every account and region

    "*":
      Checks:
      Checks:
        "s3_bucket_object_versioning":
          Regions:
          Regions:
            - "eu-west-1"
            - "us-east-1"
          Resources:
          Resources:
            - "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
            - "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
            - "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Tags:
            - "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region


## Supported Allowlist Locations

@@ -87,7 +70,6 @@ prowler aws -w arn:aws:dynamodb:<region_name>:<account_id>:table/<table_name>
- Checks (String): This field can contain either a Prowler Check Name or an `*` (which applies to all the scanned checks).
- Regions (List): This field contains a list of regions where this allowlist rule is applied (it can also contain an `*` to apply to all scanned regions).
- Resources (List): This field contains a list of regex expressions that apply to the resources to be allowlisted.
- Tags (List): Optional. This field contains a list of tuples in the form of 'key=value' that apply to the tags of the resources to be allowlisted.

<img src="../img/allowlist-row.png"/>

@@ -119,7 +101,7 @@ generates an Allowlist:
```
def handler(event, context):
    checks = {}
    checks["vpc_flow_logs_enabled"] = { "Regions": [ "*" ], "Resources": [ "" ], Optional("Tags"): [ "key:value" ] }
    checks["vpc_flow_logs_enabled"] = { "Regions": [ "*" ], "Resources": [ "" ] }

    al = { "Allowlist": { "Accounts": { "*": { "Checks": checks } } } }
    return al
```
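As a usage sketch for the allowlist options documented above (the file path, region, account id and table name are placeholders; the DynamoDB ARN form mirrors the example in this page):

```
# Allowlist from a local YAML file
prowler aws -w allowlist.yaml

# Allowlist read from a DynamoDB table
prowler aws -w arn:aws:dynamodb:eu-west-1:123456789012:table/prowler-allowlist
```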
@@ -1,6 +1,6 @@
# Scan Multiple AWS Accounts

Prowler can scan multiple accounts when it is executed from one account that can assume a role in those given accounts to scan using [Assume Role feature](role-assumption.md) and [AWS Organizations integration feature](organizations.md).
Prowler can scan multiple accounts when it is ejecuted from one account that can assume a role in those given accounts to scan using [Assume Role feature](role-assumption.md) and [AWS Organizations integration feature](organizations.md).

## Scan multiple specific accounts sequentially
@@ -1,81 +0,0 @@
# AWS Regions and Partitions

By default Prowler is able to scan the following AWS partitions:

- Commercial: `aws`
- China: `aws-cn`
- GovCloud (US): `aws-us-gov`

> To check the available regions for each partition and service please refer to the following document [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json)

It is important to take into consideration that to scan the China (`aws-cn`) or GovCloud (`aws-us-gov`) partitions it is either required to have a valid region for that partition in your AWS credentials or to specify the regions you want to audit for that partition using the `-f/--region` flag.
> Please refer to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials for more information about the AWS credentials configuration.

You can get more information about the available partitions and regions in the following [Botocore](https://github.com/boto/botocore) [file](https://github.com/boto/botocore/blob/22a19ea7c4c2c4dd7df4ab8c32733cba0c7597a4/botocore/data/partitions.json).
## AWS China

To scan your AWS account in the China partition (`aws-cn`):

- Using the `-f/--region` flag:
```
prowler aws --region cn-north-1 cn-northwest-1
```
- Using the region configured in your AWS profile at `~/.aws/credentials` or `~/.aws/config`:
```
[default]
aws_access_key_id = XXXXXXXXXXXXXXXXXXX
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
region = cn-north-1
```
> With this option all the partition regions will be scanned without needing to use the `-f/--region` flag


## AWS GovCloud (US)

To scan your AWS account in the GovCloud (US) partition (`aws-us-gov`):

- Using the `-f/--region` flag:
```
prowler aws --region us-gov-east-1 us-gov-west-1
```
- Using the region configured in your AWS profile at `~/.aws/credentials` or `~/.aws/config`:
```
[default]
aws_access_key_id = XXXXXXXXXXXXXXXXXXX
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
region = us-gov-east-1
```
> With this option all the partition regions will be scanned without needing to use the `-f/--region` flag


## AWS ISO (US & Europe)

For the AWS ISO partitions, which are known as "secret partitions" and are air-gapped from the Internet, there is no builtin way to scan them. If you want to audit an AWS account in one of the AWS ISO partitions you should manually update the [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json) and include the partition, region and services, e.g.:
```json
"iam": {
  "regions": {
    "aws": [
      "eu-west-1",
      "us-east-1"
    ],
    "aws-cn": [
      "cn-north-1",
      "cn-northwest-1"
    ],
    "aws-us-gov": [
      "us-gov-east-1",
      "us-gov-west-1"
    ],
    "aws-iso": [
      "aws-iso-global",
      "us-iso-east-1",
      "us-iso-west-1"
    ],
    "aws-iso-b": [
      "aws-iso-b-global",
      "us-isob-east-1"
    ],
    "aws-iso-e": []
  }
},
```
@@ -13,55 +13,35 @@ Before sending findings to Prowler, you will need to perform next steps:
- Using the AWS Management Console:
![Screenshot from 2020-10-29 21-17-19](https://user-images.githubusercontent.com/3985464/97634660-5ade3400-1a36-11eb-9a92-4a45cc98c158.png)
3. Allow Prowler to import its findings to AWS Security Hub by adding the policy below to the role or user running Prowler:
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/iam/prowler-security-hub.json)

Once it is enabled, it is as simple as running the command below (for all regions):

```sh
prowler aws -S
./prowler aws -S
```

or for only one filtered region like eu-west-1:

```sh
prowler -S -f eu-west-1
./prowler -S -f eu-west-1
```

> **Note 1**: It is recommended to send only fails to Security Hub and that is possible adding `-q` to the command.

> **Note 2**: Since Prowler perform checks to all regions by default you may need to filter by region when runing Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings in all regions hubs). Prowler will send findings to the Security Hub on the region where the scanned resource is located.
> **Note 2**: Since Prowler perform checks to all regions by defauls you may need to filter by region when runing Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings in all regions hubs).

> **Note 3**: To have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours.
> **Note 3** to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours.

Once you run findings for the first time you will be able to see Prowler findings in the Findings section:

![Screenshot 2020-10-29 at 10 29 05 PM](https://user-images.githubusercontent.com/3985464/97635125-32572980-1a37-11eb-9f81-5d3f05fd01d6.png)

## Send findings to Security Hub assuming an IAM Role

When you are auditing a multi-account AWS environment, you can send findings to a Security Hub of another account by assuming an IAM role from that account using the `-R` flag in the Prowler command:

```sh
prowler -S -R arn:aws:iam::123456789012:role/ProwlerExecRole
```

> Remember that the used role needs to have permissions to send findings to Security Hub. To get more information about the permissions required, please refer to the following IAM policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)


## Send only failed findings to Security Hub

When using Security Hub it is recommended to send only the failed findings generated. To follow that recommendation you could add the `-q` flag to the Prowler command:

```sh
prowler -S -q
```


## Skip sending updates of findings to Security Hub

By default, Prowler archives all its findings in Security Hub that have not appeared in the last scan.
You can skip this logic by using the option `--skip-sh-update` so Prowler will not archive older findings:

```sh
prowler -S --skip-sh-update
./prowler -S --skip-sh-update
```
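The notes above condense into a short session; the regions shown are examples only:

```
# Enable Security Hub in every region you plan to push findings to (see Note 2)
for region in eu-west-1 us-east-1; do
  aws securityhub enable-security-hub --region "$region"
done

# Send only failed findings for those regions (Notes 1 and 2)
prowler aws -S -q -f eu-west-1 us-east-1
```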
@@ -31,7 +31,7 @@ checks_v3_to_v2_mapping = {
    "awslambda_function_url_cors_policy": "extra7180",
    "awslambda_function_url_public": "extra7179",
    "awslambda_function_using_supported_runtimes": "extra762",
    "cloudformation_stack_outputs_find_secrets": "extra742",
    "cloudformation_outputs_find_secrets": "extra742",
    "cloudformation_stacks_termination_protection_enabled": "extra7154",
    "cloudfront_distributions_field_level_encryption_enabled": "extra767",
    "cloudfront_distributions_geo_restrictions_enabled": "extra732",

@@ -113,6 +113,7 @@ checks_v3_to_v2_mapping = {
    "ec2_securitygroup_allow_wide_open_public_ipv4": "extra778",
    "ec2_securitygroup_default_restrict_traffic": "check43",
    "ec2_securitygroup_from_launch_wizard": "extra7173",
    "ec2_securitygroup_in_use_without_ingress_filtering": "extra74",
    "ec2_securitygroup_not_used": "extra75",
    "ec2_securitygroup_with_many_ingress_egress_rules": "extra777",
    "ecr_repositories_lifecycle_policy_enabled": "extra7194",

@@ -137,6 +138,7 @@ checks_v3_to_v2_mapping = {
    "elbv2_internet_facing": "extra79",
    "elbv2_listeners_underneath": "extra7158",
    "elbv2_logging_enabled": "extra717",
    "elbv2_request_smugling": "extra7142",
    "elbv2_ssl_listeners": "extra793",
    "elbv2_waf_acl_attached": "extra7129",
    "emr_cluster_account_public_block_enabled": "extra7178",
@@ -81,4 +81,36 @@ Standard results will be shown and additionally the framework information as the

## Create and contribute adding other Security Frameworks

This information is part of the Developer Guide and can be found here: https://docs.prowler.cloud/en/latest/tutorials/developer-guide/.
If you want to create or contribute your own security frameworks, or add public ones to Prowler, first make sure the needed checks are available; if not, you have to create your own. Then create a compliance file per provider, like those in `prowler/compliance/aws/`, name it `<framework>_<version>_<provider>.json`, and follow the format below to create yours.

Each version of a framework file has the following high-level structure (enough to identify each framework in general terms). One requirement can also be called one control, and one requirement can be linked to multiple Prowler checks:

- `Framework`: string. Distinctive name of the framework, like CIS.
- `Provider`: string. Provider where the framework applies, such as AWS, Azure, OCI, ...
- `Version`: string. Version of the framework itself, like 1.4 for CIS.
- `Requirements`: array of objects. Includes all requirements or controls with the mapping to Prowler.
  - `Requirements_Id`: string. Unique identifier per requirement in the specific framework.
  - `Requirements_Description`: string. Description as in the framework.
  - `Requirements_Attributes`: array of objects. Includes all needed attributes per requirement, like levels, sections, etc. Whatever helps to create a dedicated report with the result of the findings. Attributes should be taken as closely as possible from the framework's own terminology.
  - `Requirements_Checks`: array. Prowler checks that are needed to prove this requirement. It can be one or multiple checks. In case no automation is possible, this can be empty.

```
{
  "Framework": "<framework>-<provider>",
  "Version": "<version>",
  "Requirements": [
    {
      "Id": "<unique-id>",
      "Description": "Requirement full description",
      "Checks": [
        "Here is the prowler check or checks that is going to be executed"
      ],
      "Attributes": [
        {
          <Add here your custom attributes.>
        }
      ]
    }
  ]
}
```

Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`.
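As a quick way to exercise a framework once the JSON file, data model and table output are in place, something like the following should work, assuming the v3 CLI flags `--list-compliance` and `--compliance` and the `cis_1.5_aws` framework name:

```
# List the compliance frameworks Prowler knows about
prowler aws --list-compliance

# Run a scan scored against a single framework
prowler aws --compliance cis_1.5_aws
```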
@@ -1,281 +0,0 @@
|
||||
# Developer Guide
|
||||
|
||||
You can extend Prowler in many different ways, in most cases you will want to create your own checks and compliance security frameworks, here is where you can learn about how to get started with it. We also include how to create custom outputs, integrations and more.
|
||||
|
||||
## Get the code and install all dependencies
|
||||
|
||||
First of all, you need a version of Python 3.9 or higher and also pip installed to be able to install all dependencies requred. Once that is satisfied go a head and clone the repo:
|
||||
|
||||
```
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
```
|
||||
For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
|
||||
```
|
||||
pip install poetry
|
||||
```
|
||||
Then install all dependencies including the ones for developers:
|
||||
```
|
||||
poetry install
|
||||
poetry shell
|
||||
```
|
||||
|
||||
## Contributing with your code or fixes to Prowler
|
||||
|
||||
This repo has git pre-commit hooks managed via the pre-commit tool. Install it how ever you like, then in the root of this repo run:
|
||||
```
|
||||
pre-commit install
|
||||
```
|
||||
You should get an output like the following:
|
||||
```
|
||||
pre-commit installed at .git/hooks/pre-commit
|
||||
```
|
||||
|
||||
Before we merge any of your pull requests we pass checks to the code, we use the following tools and automation to make sure the code is secure and dependencies up-to-dated (these should have been already installed if you ran `pipenv install -d`):
|
||||
|
||||
- `bandit` for code security review.
|
||||
- `safety` and `dependabot` for dependencies.
|
||||
- `hadolint` and `dockle` for our containers security.
|
||||
- `snyk` in Docker Hub.
|
||||
- `clair` in Amazon ECR.
|
||||
- `vulture`, `flake8`, `black` and `pylint` for formatting and best practices.
|
||||
|
||||
You can see all dependencies in file `Pipfile`.

## Create a new check for a Provider

### If the check you want to create belongs to an existing service

To create a new check, you will need to create a folder inside the specific service, i.e. `prowler/providers/<provider>/services/<service>/<check_name>/`, with the name of the check following the pattern `service_subservice_action`.
Inside that folder, create the following files:

- An empty `__init__.py`: to make Python treat this check folder as a package.
- A `check_name.py` containing the check's logic, for example:
```
# Import the Check_Report of the specific provider
from prowler.lib.check.models import Check, Check_Report_AWS

# Import the client of the specific service
from prowler.providers.aws.services.ec2.ec2_client import ec2_client


# Create the class for the check
class ec2_ebs_volume_encryption(Check):
    def execute(self):
        findings = []
        # Iterate over the service's assets to be analyzed
        for volume in ec2_client.volumes:
            # Initialize a Check Report for each item and assign the region, resource_id, resource_arn and resource_tags
            report = Check_Report_AWS(self.metadata())
            report.region = volume.region
            report.resource_id = volume.id
            report.resource_arn = volume.arn
            report.resource_tags = volume.tags
            # Implement the logic and set a PASS or a FAIL with a status and a status_extended
            if volume.encrypted:
                report.status = "PASS"
                report.status_extended = f"EBS volume {volume.id} is encrypted."
            else:
                report.status = "FAIL"
                report.status_extended = f"EBS volume {volume.id} is unencrypted."
            findings.append(report)  # Append a report for each item

        return findings
```
- A `check_name.metadata.json` containing the check's metadata, for example:
```
{
  "Provider": "aws",
  "CheckID": "ec2_ebs_volume_encryption",
  "CheckTitle": "Ensure there are no EBS Volumes unencrypted.",
  "CheckType": [
    "Data Protection"
  ],
  "ServiceName": "ec2",
  "SubServiceName": "volume",
  "ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
  "Severity": "medium",
  "ResourceType": "AwsEc2Volume",
  "Description": "Ensure there are no EBS Volumes unencrypted.",
  "Risk": "Data encryption at rest prevents data visibility in the event of its unauthorized access or theft.",
  "RelatedUrl": "",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Encrypt all EBS volumes and Enable Encryption by default You can configure your AWS account to enforce the encryption of the new EBS volumes and snapshot copies that you create. For example; Amazon EBS encrypts the EBS volumes created when you launch an instance and the snapshots that you copy from an unencrypted snapshot.",
      "Url": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html"
    }
  },
  "Categories": [
    "encryption"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""
}
```

### If the check you want to create belongs to a service not yet supported by Prowler, you will need to create a new service first

To create a new service, you will need to create a folder inside the specific provider, i.e. `prowler/providers/<provider>/services/<service>/`.
Inside that folder, create the following files:

- An empty `__init__.py`: to make Python treat this service folder as a package.
- A `<service>_service.py`, containing all the service's logic and API calls:
```
# You must import the following libraries
import threading
from typing import Optional

from pydantic import BaseModel

from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients


# Create a class for the Service
################## <Service>
class <Service>:
    def __init__(self, audit_info):
        self.service = "<service>"  # The name of the service's boto3 client
        self.session = audit_info.audit_session
        self.audited_account = audit_info.audited_account
        self.audit_resources = audit_info.audit_resources
        self.regional_clients = generate_regional_clients(self.service, audit_info)
        self.<items> = []  # Create an empty list of the items to be gathered, e.g., instances
        self.__threading_call__(self.__describe_<items>__)
        self.__describe_<item>__()  # Optionally you can create another function to retrieve more data about each item

    def __get_session__(self):
        return self.session

    def __threading_call__(self, call):
        threads = []
        for regional_client in self.regional_clients.values():
            threads.append(threading.Thread(target=call, args=(regional_client,)))
        for t in threads:
            t.start()
        for t in threads:
            t.join()

    def __describe_<items>__(self, regional_client):
        """Get ALL <Service> <Items>"""
        logger.info("<Service> - Describing <Items>...")
        try:
            describe_<items>_paginator = regional_client.get_paginator("describe_<items>")  # Paginator to get every item
            for page in describe_<items>_paginator.paginate():
                for <item> in page["<Items>"]:
                    if not self.audit_resources or (
                        is_resource_filtered(<item>["<item_arn>"], self.audit_resources)
                    ):
                        self.<items>.append(
                            <Item>(
                                arn=<item>["<item_arn>"],
                                name=<item>["<item_name>"],
                                tags=<item>.get("Tags", []),
                                region=regional_client.region,
                            )
                        )
        except Exception as error:
            logger.error(
                f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    def __describe_<item>__(self):
        """Get details for a <Service> <Item>"""
        logger.info("<Service> - Describing <Item> to get specific details...")
        try:
            for <item> in self.<items>:
                <item>_details = self.regional_clients[<item>.region].describe_<item>(
                    <Attribute>=<item>.name
                )
                # For example, check if the item is public
                <item>.public = <item>_details.get("Public", False)

        except Exception as error:
            logger.error(
                f"{<item>.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )


class <Item>(BaseModel):
    """<Item> holds a <Service> <Item>"""

    arn: str
    """<Items>[].Arn"""
    name: str
    """<Items>[].Name"""
    public: bool
    """<Items>[].Public"""
    tags: Optional[list] = []
    region: str
```
- A `<service>_client.py`, containing the initialization of the service's class we have just created so the service's checks can use it:
```
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.<service>.<service>_service import <Service>

<service>_client = <Service>(current_audit_info)
```

## Create a new security compliance framework

If you want to create or contribute your own security frameworks, or add public ones, to Prowler, you first need to make sure the needed checks are available; if they are not, you will have to create them. Then create a compliance file per provider, like in `prowler/compliance/aws/`, name it `<framework>_<version>_<provider>.json`, and follow the format below to create yours.

Each version of a framework file has the following high-level structure, so that each framework can be identified consistently. Note that one requirement can also be called one control, and one requirement can be linked to multiple Prowler checks:

- `Framework`: string. Distinctive name of the framework, like CIS.
- `Provider`: string. Provider where the framework applies, such as AWS, Azure, OCI, ...
- `Version`: string. Version of the framework itself, like 1.4 for CIS.
- `Requirements`: array of objects. Include all requirements or controls with the mapping to Prowler.
- `Requirements_Id`: string. Unique identifier per each requirement in the specific framework.
- `Requirements_Description`: string. Description as in the framework.
- `Requirements_Attributes`: array of objects. Includes all needed attributes per each requirement, like levels, sections, etc. Whatever helps to create a dedicated report with the result of the findings. Attributes should be taken as closely as possible from the framework's own terminology.
- `Requirements_Checks`: array. Prowler checks that are needed to prove this requirement. It can be one or multiple checks. In case no automation is possible, this can be empty.

```
{
  "Framework": "<framework>-<provider>",
  "Version": "<version>",
  "Requirements": [
    {
      "Id": "<unique-id>",
      "Description": "Requirement full description",
      "Checks": [
        "Here is the prowler check or checks that is going to be executed"
      ],
      "Attributes": [
        {
          <Add here your custom attributes.>
        }
      ]
    },
    ...
  ]
}
```

Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`.
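As a hint of what that data model can look like, here is a minimal, hypothetical sketch — the class and field names are illustrative, not the actual ones in `prowler/lib/outputs/models.py`:

```
from pydantic import BaseModel


# Hypothetical per-requirement output row for a custom framework report.
# The real models in prowler/lib/outputs/models.py follow the same idea:
# one flat row per finding/requirement pair, ready for CSV serialization.
class Check_Output_CSV_MyFramework(BaseModel):
    Provider: str
    Description: str
    AccountId: str
    Region: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Status: str
    ResourceId: str
    CheckId: str
```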

## Create a custom output format

## Create a new integration

## Contribute with documentation

We use `mkdocs` to build this Prowler documentation site, so you can easily contribute back with new docs or improvements to existing ones.

1. Install `mkdocs` with your favorite package manager.
2. Inside the `prowler` repository folder run `mkdocs serve`, point your browser to `http://localhost:8000`, and you will see live changes to your local copy of this documentation site (see the sketch after these steps).
3. Make all needed changes to the docs or add new documents. To do so, just edit existing md files inside `prowler/docs`; if you are adding a new section or file, please make sure you add it to the `mkdocs.yaml` file in the root folder of the Prowler repo.
4. Once you are done with changes, please send a pull request to us for review and merge. Thank you in advance!
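A quick sketch of steps 1 and 2, assuming `pip` as the package manager:

```console
pip install mkdocs
cd prowler
mkdocs serve
# then open http://localhost:8000 in your browser
```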

## Want some swag as appreciation for your contribution?

If you are like us and you love swag, we are happy to thank you for your contribution with some laptop stickers or whatever other swag we may have at the time. Please tell us more details, along with your pull request link, in our [Slack workspace here](https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog). You can also reach out to Toni de la Fuente on Twitter [here](https://twitter.com/ToniBlyx); his DMs are open.

@@ -1,29 +0,0 @@

# GCP authentication

By default, Prowler will use your User Account credentials. You can configure them using:

- `gcloud init` to use a new account
- `gcloud config set account <account>` to use an existing account

Then, obtain your access credentials using: `gcloud auth application-default login`

Otherwise, you can generate and download Service Account keys in JSON format (refer to https://cloud.google.com/iam/docs/creating-managing-service-account-keys) and provide the location of the file with the following argument:

```console
prowler gcp --credentials-file path
```

> `prowler` will scan the GCP project associated with the credentials.

Prowler will follow the same credentials search order as the [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):

1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)

Those credentials must be associated with a user or service account with the proper permissions to run all checks. To make sure, add the following roles to the member associated with the credentials (a sketch of granting them follows the list):

- Viewer
- Security Reviewer
- Stackdriver Account Viewer
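For example, granting two of those roles to a service account could look like the following sketch — the project and member are placeholders, and you should verify the exact role IDs (e.g. the Stackdriver Account Viewer one) in your console:

```console
gcloud projects add-iam-policy-binding my-project-id \
    --member="serviceAccount:prowler@my-project-id.iam.gserviceaccount.com" \
    --role="roles/viewer"
gcloud projects add-iam-policy-binding my-project-id \
    --member="serviceAccount:prowler@my-project-id.iam.gserviceaccount.com" \
    --role="roles/iam.securityReviewer"
```
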
@@ -1,36 +0,0 @@

# Integrations

## Slack

Prowler can be integrated with [Slack](https://slack.com/) to send a summary of the execution, once a Slack App has been configured in your channel, with the following command:

```sh
prowler <provider> --slack
```

> The Slack integration needs the SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables.

### Configuration

To configure the Slack integration, follow these steps:

1. Create a Slack Application:
    - Go to the [Slack API page](https://api.slack.com/tutorials/tracks/getting-a-token), scroll down to the *Create app* button and select your workspace.
    - Install the application in your selected workspaces.
    - Get the *Slack App OAuth Token* that Prowler needs to send the message.
2. Optionally, create a Slack channel (you can use an existing one).
3. Add the created Slack App to your Slack channel:
    - Click on the channel, go to the Integrations tab, and Add an App.
4. Set the following environment variables that Prowler will read (see the sketch after this list):
    - `SLACK_API_TOKEN`: the *Slack App OAuth Token* that was previously obtained.
    - `SLACK_CHANNEL_ID`: the name of the Slack channel where Prowler will send the message.
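Putting it together, a minimal sketch — the token and channel values are placeholders:

```console
export SLACK_API_TOKEN="xoxb-your-app-oauth-token"
export SLACK_CHANNEL_ID="your-channel"
prowler aws --slack
```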

@@ -51,30 +51,15 @@ prowler <provider> -e/--excluded-checks ec2 rds
```console
prowler <provider> -C/--checks-file <checks_list>.json
```
## Custom Checks
Prowler allows you to include your custom checks with the flag:

## Severities
Each check of Prowler has a severity, and there are options related to it:

- List the available checks in the provider:
```console
prowler <provider> -x/--checks-folder <custom_checks_folder>
prowler <provider> --list-severities
```
> S3 URIs are also supported as folders for custom checks, e.g. s3://bucket/prefix/checks_folder/. Make sure that the used credentials have s3:GetObject permissions on the S3 path where the custom checks are located.

The custom checks folder must contain one subfolder per check; each subfolder must be named after the check and must contain:

- An empty `__init__.py`: to make Python treat this check folder as a package.
- A `check_name.py` containing the check's logic.
- A `check_name.metadata.json` containing the check's metadata.
> The check name must start with the service name followed by an underscore (e.g., ec2_instance_public_ip). A sketch of this layout is shown below.

To see more information about how to write checks, see the [Developer Guide](../developer-guide/#create-a-new-check-for-a-provider).
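For reference, a minimal sketch of such a folder — all names here are illustrative:

```console
my_custom_checks/
└── ec2_instance_my_custom_check/
    ├── __init__.py
    ├── ec2_instance_my_custom_check.py
    └── ec2_instance_my_custom_check.metadata.json
```

You would then run it with something like `prowler aws -x ./my_custom_checks`.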
## Severities
Each of Prowler's checks has a severity, which can be:
- informational
- low
- medium
- high
- critical

To execute specific severity(s):
- Execute specific severity(s):
```console
prowler <provider> --severity critical high
```

@@ -33,8 +33,9 @@ Several checks analyse resources that are exposed to the Internet, these are:
- ec2_instance_internet_facing_with_instance_profile
- ec2_instance_public_ip
- ec2_networkacl_allow_ingress_any_port
- ec2_securitygroup_allow_wide_open_public_ipv4
- ec2_securitygroup_allow_ingress_from_internet_to_any_port
- ec2_securitygroup_allow_wide_open_public_ipv4
- ec2_securitygroup_in_use_without_ingress_filtering
- ecr_repositories_not_publicly_accessible
- eks_control_plane_endpoint_access_restricted
- eks_endpoints_not_publicly_accessible

@@ -14,6 +14,4 @@ prowler <provider> -i

- Also, by default it creates a CSV and a JSON with detailed information about the extracted resources.

> The inventory process is done with `resourcegroupstaggingapi` calls (except for the IAM resources, which are done with Boto3 API calls).
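For reference, this is roughly what such a call looks like with Boto3 — a minimal sketch, not Prowler's actual implementation:

```
import boto3

# List every tagged resource ARN in one region via the Resource Groups Tagging API
client = boto3.client("resourcegroupstaggingapi", region_name="eu-west-1")
paginator = client.get_paginator("get_resources")
for page in paginator.paginate():
    for resource in page["ResourceTagMappingList"]:
        print(resource["ResourceARN"])
```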
@@ -46,11 +46,9 @@ Prowler supports natively the following output formats:

Hereunder is the structure for each of the supported report formats by Prowler:

### HTML

### CSV
| ASSESSMENT_START_TIME | FINDING_UNIQUE_ID | PROVIDER | PROFILE | ACCOUNT_ID | ACCOUNT_NAME | ACCOUNT_EMAIL | ACCOUNT_ARN | ACCOUNT_ORG | ACCOUNT_TAGS | REGION | CHECK_ID | CHECK_TITLE | CHECK_TYPE | STATUS | STATUS_EXTENDED | SERVICE_NAME | SUBSERVICE_NAME | SEVERITY | RESOURCE_ID | RESOURCE_ARN | RESOURCE_TYPE | RESOURCE_DETAILS | RESOURCE_TAGS | DESCRIPTION | COMPLIANCE | RISK | RELATED_URL | REMEDIATION_RECOMMENDATION_TEXT | REMEDIATION_RECOMMENDATION_URL | REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC | REMEDIATION_RECOMMENDATION_CODE_TERRAFORM | REMEDIATION_RECOMMENDATION_CODE_CLI | REMEDIATION_RECOMMENDATION_CODE_OTHER | CATEGORIES | DEPENDS_ON | RELATED_TO | NOTES |
| ------- | ----------- | ------ | -------- | ------------ | ----------- | ---------- | ---------- | --------------------- | -------------------------- | -------------- | ----------------- | ------------------------ | --------------- | ---------- | ----------------- | --------- | -------------- | ----------------- | ------------------ | --------------------- | -------------------- | ------------------- | ------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- |
| ASSESSMENT_START_TIME | FINDING_UNIQUE_ID | PROVIDER | PROFILE | ACCOUNT_ID | ACCOUNT_NAME | ACCOUNT_EMAIL | ACCOUNT_ARN | ACCOUNT_ORG | ACCOUNT_TAGS | REGION | CHECK_ID | CHECK_TITLE | CHECK_TYPE | STATUS | STATUS_EXTENDED | SERVICE_NAME | SUBSERVICE_NAME | SEVERITY | RESOURCE_ID | RESOURCE_ARN | RESOURCE_TYPE | RESOURCE_DETAILS | RESOURCE_TAGS | DESCRIPTION | RISK | RELATED_URL | REMEDIATION_RECOMMENDATION_TEXT | REMEDIATION_RECOMMENDATION_URL | REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC | REMEDIATION_RECOMMENDATION_CODE_TERRAFORM | REMEDIATION_RECOMMENDATION_CODE_CLI | REMEDIATION_RECOMMENDATION_CODE_OTHER | CATEGORIES | DEPENDS_ON | RELATED_TO | NOTES |
| ------- | ----------- | ------ | -------- | ------------ | ----------- | ---------- | ---------- | --------------------- | -------------------------- | -------------- | ----------------- | ------------------------ | --------------- | ---------- | ----------------- | --------- | -------------- | ----------------- | ------------------ | --------------------- | -------------------- | ------------------- | ------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- |

### JSON

@@ -73,10 +71,6 @@ Hereunder is the structure for each of the supported report formats by Prowler:
"Severity": "low",
"ResourceId": "rds-instance-id",
"ResourceArn": "",
"ResourceTags": {
"test": "test",
"environment": "dev"
},
"ResourceType": "AwsRdsDbInstance",
"ResourceDetails": "",
"Description": "Ensure RDS instances have minor version upgrade enabled.",
@@ -95,15 +89,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
}
},
"Categories": [],
"Notes": "",
"Compliance": {
"CIS-1.4": [
"1.20"
],
"CIS-1.5": [
"1.20"
]
}
"Notes": ""
},{
"AssessmentStartTime": "2022-12-01T14:16:57.354413",
"FindingUniqueId": "",
@@ -123,7 +109,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
"ResourceId": "rds-instance-id",
"ResourceArn": "",
"ResourceType": "AwsRdsDbInstance",
"ResourceTags": {},
"ResourceDetails": "",
"Description": "Ensure RDS instances have minor version upgrade enabled.",
"Risk": "Auto Minor Version Upgrade is a feature that you can enable to have your database automatically upgraded when a new minor database engine version is available. Minor version upgrades often patch security vulnerabilities and fix bugs and therefore should be applied.",
"RelatedUrl": "https://aws.amazon.com/blogs/database/best-practices-for-upgrading-amazon-rds-to-major-and-minor-versions-of-postgresql/",
@@ -140,8 +126,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
}
},
"Categories": [],
"Notes": "",
"Compliance": {},
"Notes": ""
}]
```

@@ -181,30 +166,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"CISA your-systems-2 booting-up-thing-to-do-first-3",
"CIS-1.5 2.3.2",
"AWS-Foundational-Security-Best-Practices rds",
"RBI-Cyber-Security-Framework annex_i_6",
"FFIEC d3-cc-pm-b-1 d3-cc-pm-b-3"
],
"AssociatedStandards": [
{
"StandardsId": "CISA"
},
{
"StandardsId": "CIS-1.5"
},
{
"StandardsId": "AWS-Foundational-Security-Best-Practices"
},
{
"StandardsId": "RBI-Cyber-Security-Framework"
},
{
"StandardsId": "FFIEC"
}
]
"RelatedRequirements": []
},
"Remediation": {
"Recommendation": {
@@ -243,30 +205,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"CISA your-systems-2 booting-up-thing-to-do-first-3",
"CIS-1.5 2.3.2",
"AWS-Foundational-Security-Best-Practices rds",
"RBI-Cyber-Security-Framework annex_i_6",
"FFIEC d3-cc-pm-b-1 d3-cc-pm-b-3"
],
"AssociatedStandards": [
{
"StandardsId": "CISA"
},
{
"StandardsId": "CIS-1.5"
},
{
"StandardsId": "AWS-Foundational-Security-Best-Practices"
},
{
"StandardsId": "RBI-Cyber-Security-Framework"
},
{
"StandardsId": "FFIEC"
}
]
"RelatedRequirements": []
},
"Remediation": {
"Recommendation": {

@@ -33,17 +33,14 @@ nav:
  - Reporting: tutorials/reporting.md
  - Compliance: tutorials/compliance.md
  - Quick Inventory: tutorials/quick-inventory.md
  - Integrations: tutorials/integrations.md
  - Configuration File: tutorials/configuration_file.md
  - Logging: tutorials/logging.md
  - Allowlist: tutorials/allowlist.md
  - Pentesting: tutorials/pentesting.md
  - Developer Guide: tutorials/developer-guide.md
  - AWS:
      - Assume Role: tutorials/aws/role-assumption.md
      - AWS Security Hub: tutorials/aws/securityhub.md
      - AWS Organizations: tutorials/aws/organizations.md
      - AWS Regions and Partitions: tutorials/aws/regions-and-partitions.md
      - Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
      - AWS CloudShell: tutorials/aws/cloudshell.md
      - Checks v2 to v3 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
@@ -53,9 +50,6 @@ nav:
  - Azure:
      - Authentication: tutorials/azure/authentication.md
      - Subscriptions: tutorials/azure/subscriptions.md
  - Google Cloud:
      - Authentication: tutorials/gcp/authentication.md
  - Developer Guide: tutorials/developer-guide.md
  - Security: security.md
  - Contact Us: contact.md
  - Troubleshooting: troubleshooting.md

@@ -4,7 +4,7 @@ AWSTemplateFormatVersion: '2010-09-09'
# aws cloudformation create-stack \
#   --capabilities CAPABILITY_IAM --capabilities CAPABILITY_NAMED_IAM \
#   --template-body "file://create_role_to_assume_cfn.yaml" \
#   --stack-name "ProwlerScanRole" \
#   --stack-name "ProwlerExecRole" \
#   --parameters "ParameterKey=AuthorisedARN,ParameterValue=arn:aws:iam::123456789012:root"
#
Description: |
@@ -13,7 +13,7 @@ Description: |
  account to assume that role. The role name and the ARN of the trusted user can all be passed
  to the CloudFormation stack as parameters. Then you can run Prowler to perform a security
  assessment with a command like:
  prowler --role ProwlerScanRole.ARN
  ./prowler -A <THIS_ACCOUNT_ID> -R ProwlerExecRole
Parameters:
  AuthorisedARN:
    Description: |
@@ -22,12 +22,12 @@ Parameters:
    Type: String
  ProwlerRoleName:
    Description: |
      Name of the IAM role that will have these policies attached. Default: ProwlerScanRole
      Name of the IAM role that will have these policies attached. Default: ProwlerExecRole
    Type: String
    Default: 'ProwlerScanRole'
    Default: 'ProwlerExecRole'

Resources:
  ProwlerScanRole:
  ProwlerExecRole:
    Type: AWS::IAM::Role
    Properties:
      AssumeRolePolicyDocument:
@@ -42,49 +42,31 @@ Resources:
        # Bool:
        #   'aws:MultiFactorAuthPresent': true
      # This is 12h that is maximum allowed, Minimum is 3600 = 1h
      # to take advantage of this use -T like in './prowler --role ProwlerScanRole.ARN -T 43200'
      # to take advantage of this use -T like in './prowler -A <ACCOUNT_ID_TO_ASSUME> -R ProwlerExecRole -T 43200 -M text,html'
      MaxSessionDuration: 43200
      ManagedPolicyArns:
        - 'arn:aws:iam::aws:policy/SecurityAudit'
        - 'arn:aws:iam::aws:policy/job-function/ViewOnlyAccess'
      RoleName: !Sub ${ProwlerRoleName}
      Policies:
        - PolicyName: ProwlerScanRoleAdditionalViewPrivileges
        - PolicyName: ProwlerExecRoleAdditionalViewPrivileges
          PolicyDocument:
            Version: '2012-10-17'
            Statement:
              - Effect: Allow
                Action:
                  - 'account:Get*'
                  - 'appstream:Describe*'
                  - 'appstream:List*'
                  - 'codeartifact:List*'
                  - 'codebuild:BatchGet*'
                  - 'ds:Get*'
                  - 'ds:Describe*'
                  - 'ds:List*'
                  - 'ds:ListAuthorizedApplications'
                  - 'ec2:GetEbsEncryptionByDefault'
                  - 'ecr:Describe*'
                  - 'elasticfilesystem:DescribeBackupPolicy'
                  - 'glue:GetConnections'
                  - 'glue:GetSecurityConfiguration*'
                  - 'glue:GetSecurityConfiguration'
                  - 'glue:SearchTables'
                  - 'lambda:GetFunction*'
                  - 'macie2:GetMacieSession'
                  - 'lambda:GetFunction'
                  - 's3:GetAccountPublicAccessBlock'
                  - 'shield:DescribeProtection'
                  - 'shield:GetSubscriptionState'
                  - 'securityhub:BatchImportFindings'
                  - 'securityhub:GetFindings'
                  - 'ssm:GetDocument'
                  - 'support:Describe*'
                  - 'tag:GetTagKeys'
                Resource: '*'
        - PolicyName: ProwlerScanRoleAdditionalViewPrivilegesApiGateway
          PolicyDocument:
            Version: '2012-10-17'
            Statement:
              - Effect: Allow
                Action:
                  - 'apigateway:GET'
                Resource: 'arn:aws:apigateway:*::/restapis/*'

@@ -3,34 +3,24 @@
"Statement": [
{
"Action": [
"account:Get*",
"appstream:Describe*",
"appstream:List*",
"backup:List*",
"cloudtrail:GetInsightSelectors",
"codeartifact:List*",
"codebuild:BatchGet*",
"drs:Describe*",
"ds:Get*",
"ds:Describe*",
"ds:Get*",
"ds:List*",
"ec2:GetEbsEncryptionByDefault",
"ecr:Describe*",
"ecr:GetRegistryScanningConfiguration",
"elasticfilesystem:DescribeBackupPolicy",
"glue:GetConnections",
"glue:GetSecurityConfiguration*",
"glue:SearchTables",
"lambda:GetFunction*",
"logs:FilterLogEvents",
"macie2:GetMacieSession",
"s3:GetAccountPublicAccessBlock",
"shield:DescribeProtection",
"shield:GetSubscriptionState",
"securityhub:BatchImportFindings",
"securityhub:GetFindings",
"ssm:GetDocument",
"ssm-incidents:List*",
"support:Describe*",
"tag:GetTagKeys"
],
@@ -44,8 +34,7 @@
"apigateway:GET"
],
"Resource": [
"arn:aws:apigateway:*::/restapis/*",
"arn:aws:apigateway:*::/apis/*"
"arn:aws:apigateway:*::/restapis/*"
]
}
]

3272
poetry.lock
generated
@@ -1,7 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import sys

from prowler.lib.banner import print_banner
@@ -11,16 +10,15 @@ from prowler.lib.check.check import (
    exclude_checks_to_run,
    exclude_services_to_run,
    execute_checks,
    get_checks_from_input_arn,
    get_regions_from_audit_resources,
    list_categories,
    list_checks,
    list_services,
    parse_checks_from_folder,
    print_categories,
    print_checks,
    print_compliance_frameworks,
    print_compliance_requirements,
    print_services,
    remove_custom_checks_module,
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
@@ -30,18 +28,14 @@ from prowler.lib.outputs.compliance import display_compliance_table
from prowler.lib.outputs.html import add_html_footer, fill_html_overview_statistics
from prowler.lib.outputs.json import close_json
from prowler.lib.outputs.outputs import extract_findings_statistics, send_to_s3_bucket
from prowler.lib.outputs.slack import send_slack_message
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.allowlist.allowlist import parse_allowlist_file
from prowler.providers.aws.lib.quick_inventory.quick_inventory import quick_inventory
from prowler.providers.aws.lib.security_hub.security_hub import (
    resolve_security_hub_previous_findings,
)
from prowler.providers.common.allowlist import set_provider_allowlist
from prowler.providers.common.audit_info import (
    set_provider_audit_info,
    set_provider_execution_parameters,
)
from prowler.providers.common.audit_info import set_provider_audit_info
from prowler.providers.common.outputs import set_provider_output_options
from prowler.providers.common.quick_inventory import run_provider_quick_inventory


def prowler():
@@ -57,13 +51,9 @@ def prowler():
    services = args.services
    categories = args.categories
    checks_file = args.checks_file
    checks_folder = args.checks_folder
    severities = args.severity
    compliance_framework = args.compliance

    if not args.no_banner:
        print_banner(args)

    # We treat the compliance framework as another output format
    if compliance_framework:
        args.output_modes.extend(compliance_framework)
@@ -71,6 +61,9 @@ def prowler():
    # Set Logger configuration
    set_logging_config(args.log_level, args.log_file, args.only_logs)

    if not args.no_banner:
        print_banner(args)

    if args.list_services:
        print_services(list_services(provider))
        sys.exit()
@@ -87,31 +80,26 @@ def prowler():
    # Load compliance frameworks
    logger.debug("Loading compliance frameworks from .json files")

    bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
    # Complete checks metadata with the compliance framework specification
    update_checks_metadata_with_compliance(
        bulk_compliance_frameworks, bulk_checks_metadata
    )
    if args.list_compliance:
        print_compliance_frameworks(bulk_compliance_frameworks)
        sys.exit()
    if args.list_compliance_requirements:
        print_compliance_requirements(
            bulk_compliance_frameworks, args.list_compliance_requirements
    # Load the compliance framework if specified with --compliance
    # If some compliance argument is specified we have to load it
    if (
        args.list_compliance
        or args.list_compliance_requirements
        or compliance_framework
    ):
        bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
        # Complete checks metadata with the compliance framework specification
        update_checks_metadata_with_compliance(
            bulk_compliance_frameworks, bulk_checks_metadata
        )
        sys.exit()

    # If -l/--list-checks passed as argument, print checks to execute and quit
    if args.list_checks:
        print_checks(provider, list_checks(provider), bulk_checks_metadata)
        sys.exit()

    # Set the audit info based on the selected provider
    audit_info = set_provider_audit_info(provider, args.__dict__)

    # Import custom checks from folder
    if checks_folder:
        parse_checks_from_folder(audit_info, checks_folder, provider)
    if args.list_compliance:
        print_compliance_frameworks(bulk_compliance_frameworks)
        sys.exit()
    if args.list_compliance_requirements:
        print_compliance_requirements(
            bulk_compliance_frameworks, args.list_compliance_requirements
        )
        sys.exit()

    # Load checks to execute
    checks_to_execute = load_checks_to_execute(
@@ -139,22 +127,37 @@ def prowler():
    # Sort final check list
    checks_to_execute = sorted(checks_to_execute)

    # Once the audit_info is set and we have the eventual checks based on the resource identifier,
    # it is time to check what Prowler's checks are going to be executed
    # If -l/--list-checks passed as argument, print checks to execute and quit
    if args.list_checks:
        print_checks(provider, checks_to_execute, bulk_checks_metadata)
        sys.exit()

    # Set the audit info based on the selected provider
    audit_info = set_provider_audit_info(provider, args.__dict__)

    # Once the audit_info is set and we have the eventual checks from arn, it is time to exclude the others
    if audit_info.audit_resources:
        checks_to_execute = set_provider_execution_parameters(provider, audit_info)
        audit_info.audited_regions = get_regions_from_audit_resources(
            audit_info.audit_resources
        )
        checks_to_execute = get_checks_from_input_arn(
            audit_info.audit_resources, provider
        )

    # Parse Allowlist
    allowlist_file = set_provider_allowlist(provider, audit_info, args)
    # Parse content from Allowlist file and get it, if necessary, from S3
    if provider == "aws" and args.allowlist_file:
        allowlist_file = parse_allowlist_file(audit_info, args.allowlist_file)
    else:
        allowlist_file = None

    # Set output options based on the selected provider
    # Setting output options based on the selected provider
    audit_output_options = set_provider_output_options(
        provider, args, audit_info, allowlist_file, bulk_checks_metadata
    )

    # Run the quick inventory for the provider if available
    if hasattr(args, "quick_inventory") and args.quick_inventory:
        run_provider_quick_inventory(provider, audit_info, args.output_directory)
    # Quick Inventory for AWS
    if provider == "aws" and args.quick_inventory:
        quick_inventory(audit_info, args.output_directory)
        sys.exit()

    # Execute checks
@@ -171,21 +174,6 @@ def prowler():
    # Extract findings stats
    stats = extract_findings_statistics(findings)

    if args.slack:
        if "SLACK_API_TOKEN" in os.environ and "SLACK_CHANNEL_ID" in os.environ:
            _ = send_slack_message(
                os.environ["SLACK_API_TOKEN"],
                os.environ["SLACK_CHANNEL_ID"],
                stats,
                provider,
                audit_info,
            )
        else:
            logger.critical(
                "Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack)."
            )
            sys.exit(1)

    if args.output_modes:
        for mode in args.output_modes:
            # Close json file if exists
@@ -242,10 +230,6 @@ def prowler():
            audit_output_options.output_directory,
        )

    # If custom checks were passed, remove the modules
    if checks_folder:
        remove_custom_checks_module(checks_folder, provider)

    # If there are failed findings exit code 3, except if -z is input
    if not args.ignore_exit_code_3 and stats["total_fail"] > 0:
        sys.exit(3)

@@ -362,14 +362,14 @@
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_rotate_access_key_90_days",
"iam_no_root_access_key",
"iam_user_mfa_enabled_console_access",
"iam_root_hardware_mfa_enabled",
"iam_password_policy_minimum_length_14",
"iam_disable_90_days_credentials",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

@@ -155,8 +155,7 @@
"Id": "1.16",
"Description": "Ensure IAM policies that allow full \"*:*\" administrative privileges are not attached",
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
],
"Attributes": [
{
@@ -534,7 +533,6 @@
"Id": "2.1.5",
"Description": "Ensure that S3 Buckets are configured with 'Block public access (bucket settings)'",
"Checks": [
"s3_bucket_level_public_access_block",
"s3_account_level_public_access_blocks"
],
"Attributes": [

@@ -155,8 +155,7 @@
"Id": "1.16",
"Description": "Ensure IAM policies that allow full \"*:*\" administrative privileges are not attached",
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
],
"Attributes": [
{
@@ -534,7 +533,6 @@
"Id": "2.1.5",
"Description": "Ensure that S3 Buckets are configured with 'Block public access (bucket settings)'",
"Checks": [
"s3_bucket_level_public_access_block",
"s3_account_level_public_access_blocks"
],
"Attributes": [

@@ -88,8 +88,7 @@
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -182,8 +181,7 @@
"Checks": [
"elbv2_ssl_listeners",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},

@@ -103,8 +103,7 @@
"awslambda_function_url_public",
"awslambda_function_url_cors_policy",
"iam_policy_allows_privilege_escalation",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -130,8 +129,7 @@
"Checks": [
"iam_policy_allows_privilege_escalation",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -204,8 +202,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -1068,8 +1065,7 @@
],
"Checks": [
"iam_policy_allows_privilege_escalation",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_custom_policy_permissive_role_assumption",
"iam_policy_attached_only_to_group_or_roles",
"iam_role_cross_service_confused_deputy_prevention"
@@ -1630,7 +1626,7 @@
}
],
"Checks": [
"ec2_securitygroup_allow_ingress_from_internet_to_any_port"
"ec2_securitygroup_in_use_without_ingress_filtering"
]
},
{

@@ -26,9 +26,9 @@
"opensearch_service_domains_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -58,9 +58,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",

@@ -20,8 +20,7 @@
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -92,9 +91,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -145,9 +144,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -189,9 +188,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -250,9 +249,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -270,8 +269,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -291,8 +290,7 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -976,8 +974,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

@@ -418,9 +418,9 @@
],
"Checks": [
"ec2_instance_profile_attached",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -519,8 +519,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -536,8 +536,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -553,8 +553,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key"
@@ -579,8 +579,8 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_rotate_access_key_90_days",
@@ -755,9 +755,9 @@
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

@@ -35,8 +35,7 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",

@@ -82,9 +82,9 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -166,9 +166,9 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",

@@ -45,8 +45,7 @@
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -115,8 +114,7 @@
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -171,8 +169,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -204,8 +201,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -259,8 +255,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -276,8 +271,7 @@
],
"Checks": [
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials",
@@ -518,8 +512,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_user_mfa_enabled_console_access",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",

@@ -21,8 +21,7 @@
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -61,8 +60,7 @@
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -127,8 +125,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -145,8 +142,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -163,8 +159,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -180,8 +175,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -455,8 +449,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_managed_by_ssm",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_url_public",
"rds_snapshots_public_access",
@@ -828,8 +821,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

@@ -101,8 +101,7 @@
"cloudtrail_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -126,8 +125,7 @@
],
"Checks": [
"ec2_ebs_public_snapshot",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_url_public",
@@ -182,8 +180,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -218,8 +215,7 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_url_public",
@@ -850,8 +846,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

@@ -19,8 +19,7 @@
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -161,8 +160,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -240,8 +238,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -277,9 +274,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_disable_90_days_credentials",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
@@ -351,8 +348,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -380,8 +376,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -409,8 +404,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -438,8 +432,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -467,8 +460,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -496,8 +488,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -525,8 +516,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -554,8 +544,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -582,8 +571,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -611,8 +599,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -645,8 +632,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"ec2_instance_imdsv2_enabled"
|
||||
@@ -669,8 +655,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -698,8 +683,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -727,8 +711,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -758,8 +741,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_disable_90_days_credentials",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -789,8 +771,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -841,8 +822,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -886,8 +866,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -915,8 +894,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -944,8 +922,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -1069,8 +1046,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -1094,8 +1070,8 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges"
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -1115,8 +1091,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_disable_90_days_credentials",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -1144,8 +1119,8 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
@@ -1163,8 +1138,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges"
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -1203,8 +1177,8 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
@@ -1458,8 +1432,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -2633,9 +2606,9 @@
|
||||
"ec2_instance_profile_attached",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
@@ -2713,8 +2686,7 @@
|
||||
"ec2_instance_profile_attached",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -2969,8 +2941,7 @@
|
||||
"ec2_ebs_default_encryption",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -3941,8 +3912,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_disable_90_days_credentials",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -5413,8 +5383,7 @@
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
@@ -5457,8 +5426,7 @@
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
|
||||
@@ -569,8 +569,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_disable_90_days_credentials",
|
||||
@@ -625,8 +624,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_disable_90_days_credentials"
|
||||
]
|
||||
@@ -1078,8 +1076,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"awslambda_function_url_public",
|
||||
"rds_snapshots_public_access",
|
||||
|
||||
@@ -156,8 +156,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"iam_no_root_access_key",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
|
||||
@@ -113,11 +113,9 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -46,8 +46,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_disable_90_days_credentials"
|
||||
]
|
||||
},
|
||||
@@ -312,8 +311,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges"
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,7 +1,5 @@
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### Account, Check and/or Region can be * to apply for all the cases
### Resources is a list that can have either Regex or Keywords:
########################### ALLOWLIST EXAMPLE ###########################
Allowlist:
  Accounts:
@@ -13,19 +11,11 @@ Allowlist:
          Resources:
            - "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
            - "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "*" # Will ignore every EC2 check in every account and region
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
            - "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
            - "test" # Will ignore every resource containing the string "test" in every account and region

    "*":
      Checks:
@@ -37,14 +27,6 @@ Allowlist:
            - "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
            - "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
            - "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Tags:
            - "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region


# EXAMPLE: CONTROL TOWER (to migrate)
# When using Control Tower, guardrails prevent access to certain protected resources. The allowlist

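The matching semantics the comments above describe (Resources as regex-or-keyword patterns, Tags "ANDed" together, regex alternation for OR) can be illustrated with a short standalone sketch. This is an illustrative approximation, not Prowler's actual allowlist code, and `is_allowlisted` is a hypothetical helper name:

```python
import re


def is_allowlisted(resource_id: str, resource_tags: str, entry: dict) -> bool:
    # Hypothetical helper, not Prowler's implementation.
    # Resources: any pattern may match; plain keywords work because
    # re.search also matches literal substrings.
    if not any(re.search(p, resource_id) for p in entry.get("Resources", [])):
        return False
    # Tags are "ANDed": every listed pattern must match the serialized tags.
    # An alternation such as "project=test|project=stage" expresses OR
    # inside a single AND term. An absent Tags list always passes.
    return all(re.search(p, resource_tags) for p in entry.get("Tags", []))


entry = {"Resources": ["test"], "Tags": ["test=test", "project=test|project=stage"]}
print(is_allowlisted("my-test-bucket", "test=test project=stage", entry))  # True
```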
@@ -3,42 +3,46 @@ import pathlib
from datetime import datetime, timezone
from os import getcwd

import requests
import yaml

from prowler.lib.logger import logger

timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.5.2"
prowler_version = "3.2.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
azure_logo = "https://user-images.githubusercontent.com/38561120/235927375-b23e2e0f-8932-49ec-b59c-d89f61c8041d.png"
gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accdc-c226-4391-8e97-6ca86a91cf50.png"

orange_color = "\033[38;5;208m"
banner_color = "\033[1;92m"

# Compliance
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
compliance_aws_dir = f"{actual_directory}/../compliance/aws"
available_compliance_frameworks = []
with os.scandir(compliance_aws_dir) as files:
    files = [
        file.name
        for file in files
        if file.is_file()
        and file.name.endswith(".json")
        and available_compliance_frameworks.append(file.name.removesuffix(".json"))
    ]

compliance_specification_dir = "./compliance"
available_compliance_frameworks = [
    "ens_rd2022_aws",
    "cis_1.4_aws",
    "cis_1.5_aws",
    "aws_audit_manager_control_tower_guardrails_aws",
    "aws_foundational_security_best_practices_aws",
    "cisa_aws",
    "fedramp_low_revision_4_aws",
    "fedramp_moderate_revision_4_aws",
    "ffiec_aws",
    "gdpr_aws",
    "gxp_eu_annex_11_aws",
    "gxp_21_cfr_part_11_aws",
    "hipaa_aws",
    "nist_800_53_revision_4_aws",
    "nist_800_53_revision_5_aws",
    "nist_800_171_revision_2_aws",
    "nist_csf_1.1_aws",
    "pci_3.2.1_aws",
    "rbi_cyber_security_framework_aws",
    "soc2_aws",
]
# AWS services-regions matrix json
aws_services_json_file = "aws_regions_by_service.json"

# gcp_zones_json_file = "gcp_zones.json"

default_output_directory = getcwd() + "/output"

output_file_timestamp = timestamp.strftime("%Y%m%d%H%M%S")
@@ -50,22 +54,6 @@ html_file_suffix = ".html"
config_yaml = f"{pathlib.Path(os.path.dirname(os.path.realpath(__file__)))}/config.yaml"


def check_current_version():
    try:
        prowler_version_string = f"Prowler {prowler_version}"
        release_response = requests.get(
            "https://api.github.com/repos/prowler-cloud/prowler/tags"
        )
        latest_version = release_response.json()[0]["name"]
        if latest_version != prowler_version:
            return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
        else:
            return f"{prowler_version_string} (it is the latest version, yay!)"
    except Exception as error:
        logger.error(f"{error.__class__.__name__}: {error}")
        return f"{prowler_version_string}"


def change_config_var(variable, value):
    try:
        with open(config_yaml) as f:

@@ -41,17 +41,3 @@ obsolete_lambda_runtimes:
    "dotnetcore2.1",
    "ruby2.5",
  ]

# AWS Organizations
# organizations_scp_check_deny_regions
# organizations_enabled_regions: [
#   'eu-central-1',
#   'eu-west-1',
#   "us-east-1"
# ]
organizations_enabled_regions: []
# organizations_delegated_administrators
# organizations_trusted_delegated_administrators: [
#   "12345678901"
# ]
organizations_trusted_delegated_administrators: []

@@ -1,8 +1,6 @@
import functools
import importlib
import os
import re
import shutil
import sys
import traceback
from pkgutil import walk_packages
@@ -26,7 +24,6 @@ except KeyError:
except Exception:
    sys.exit(1)

import prowler
from prowler.lib.utils.utils import open_file, parse_json_file
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.outputs import Provider_Output_Options
@@ -111,8 +108,8 @@ def exclude_services_to_run(
# Load checks from checklist.json
def parse_checks_from_file(input_file: str, provider: str) -> set:
    checks_to_execute = set()
    with open_file(input_file) as f:
        json_file = parse_json_file(f)
    f = open_file(input_file)
    json_file = parse_json_file(f)

    for check_name in json_file[provider]:
        checks_to_execute.add(check_name)
@@ -120,87 +117,16 @@ def parse_checks_from_file(input_file: str, provider: str) -> set:
    return checks_to_execute


# Load checks from custom folder
def parse_checks_from_folder(audit_info, input_folder: str, provider: str) -> int:
    try:
        imported_checks = 0
        # Check if input folder is a S3 URI
        if provider == "aws" and re.search(
            "^s3://([^/]+)/(.*?([^/]+))/$", input_folder
        ):
            bucket = input_folder.split("/")[2]
            key = ("/").join(input_folder.split("/")[3:])
s3_reource = audit_info.audit_session.resource("s3")
|
||||
bucket = s3_reource.Bucket(bucket)
|
||||
            for obj in bucket.objects.filter(Prefix=key):
                if not os.path.exists(os.path.dirname(obj.key)):
                    os.makedirs(os.path.dirname(obj.key))
                bucket.download_file(obj.key, obj.key)
            input_folder = key
        # Import custom checks by moving the checks folders to the corresponding services
        with os.scandir(input_folder) as checks:
            for check in checks:
                if check.is_dir():
                    check_module = input_folder + "/" + check.name
                    # Copy checks to specific provider/service folder
                    check_service = check.name.split("_")[0]
                    prowler_dir = prowler.__path__
                    prowler_module = f"{prowler_dir[0]}/providers/{provider}/services/{check_service}/{check.name}"
                    if os.path.exists(prowler_module):
                        shutil.rmtree(prowler_module)
                    shutil.copytree(check_module, prowler_module)
                    imported_checks += 1
        return imported_checks
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit(1)


# Load checks from custom folder
def remove_custom_checks_module(input_folder: str, provider: str):
    # Check if input folder is a S3 URI
    s3_uri = False
    if provider == "aws" and re.search("^s3://([^/]+)/(.*?([^/]+))/$", input_folder):
        input_folder = ("/").join(input_folder.split("/")[3:])
        s3_uri = True

    with os.scandir(input_folder) as checks:
        for check in checks:
            if check.is_dir():
                # Remove imported checks
                check_service = check.name.split("_")[0]
                prowler_dir = prowler.__path__
                prowler_module = f"{prowler_dir[0]}/providers/{provider}/services/{check_service}/{check.name}"
                if os.path.exists(prowler_module):
                    shutil.rmtree(prowler_module)
    # If S3 URI, remove the downloaded folders
    if s3_uri and os.path.exists(input_folder):
        shutil.rmtree(input_folder)


def list_services(provider: str) -> set():
    available_services = set()
    checks_tuple = recover_checks_from_provider(provider)
    for _, check_path in checks_tuple:
        # Format: /absolute_path/prowler/providers/{provider}/services/{service_name}/{check_name}
        if os.name == "nt":
            service_name = check_path.split("\\")[-2]
        else:
            service_name = check_path.split("/")[-2]
        service_name = check_path.split("/")[-2]
        available_services.add(service_name)
    return sorted(available_services)


def list_checks(provider: str) -> set():
    available_checks = set()
    checks_tuple = recover_checks_from_provider(provider)
    for check_name, _ in checks_tuple:
        available_checks.add(check_name)
    return sorted(available_checks)


def list_categories(provider: str, bulk_checks_metadata: dict) -> set():
    available_categories = set()
    for check in bulk_checks_metadata.values():
@@ -427,22 +353,6 @@ def execute_checks(
        audit_progress=0,
    )

    if os.name != "nt":
        try:
            from resource import RLIMIT_NOFILE, getrlimit

            # Check ulimit for the maximum system open files
            soft, _ = getrlimit(RLIMIT_NOFILE)
            if soft < 4096:
                logger.warning(
                    f"Your session file descriptors limit ({soft} open files) is below 4096. We recommend to increase it to avoid errors. Solve it running this command `ulimit -n 4096`. For more info visit https://docs.prowler.cloud/en/latest/troubleshooting/"
                )
        except Exception as error:
            logger.error("Unable to retrieve ulimit default settings")
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    # Execution with the --only-logs flag
    if audit_output_options.only_logs:
        for check_name in checks_to_execute:
@@ -594,3 +504,68 @@ def recover_checks_from_service(service_list: list, provider: str) -> list:
            # if service_name in group_list: checks_from_arn.add(check_name)
            checks.add(check_name)
    return checks


def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
    """get_checks_from_input_arn gets the list of checks from the input arns"""
    checks_from_arn = set()
    # Handle if there are audit resources so only their services are executed
    if audit_resources:
        services_without_subservices = ["guardduty", "kms", "s3", "elb"]
        service_list = set()
        sub_service_list = set()
        for resource in audit_resources:
            service = resource.split(":")[2]
            sub_service = resource.split(":")[5].split("/")[0].replace("-", "_")

            if (
                service != "wafv2" and service != "waf"
            ):  # WAF Services does not have checks
                # Parse services when they are different in the ARNs
                if service == "lambda":
                    service = "awslambda"
                if service == "elasticloadbalancing":
                    service = "elb"
                elif service == "logs":
                    service = "cloudwatch"
                service_list.add(service)

                # Get subservices to execute only applicable checks
                if service not in services_without_subservices:
                    # Parse some specific subservices
                    if service == "ec2":
                        if sub_service == "security_group":
                            sub_service = "securitygroup"
                        if sub_service == "network_acl":
                            sub_service = "networkacl"
                        if sub_service == "image":
                            sub_service = "ami"
                    if service == "rds":
                        if sub_service == "cluster_snapshot":
                            sub_service = "snapshot"
                    sub_service_list.add(sub_service)
                else:
                    sub_service_list.add(service)

        checks = recover_checks_from_service(service_list, provider)

        # Filter only checks with audited subservices
        for check in checks:
            if any(sub_service in check for sub_service in sub_service_list):
                if not (sub_service == "policy" and "password_policy" in check):
                    checks_from_arn.add(check)

    # Return final checks list
    return sorted(checks_from_arn)


def get_regions_from_audit_resources(audit_resources: list) -> list:
    """get_regions_from_audit_resources gets the regions from the audit resources arns"""
    audited_regions = []
    for resource in audit_resources:
        region = resource.split(":")[3]
        if region and region not in audited_regions:  # Check if arn has a region
            audited_regions.append(region)
    if audited_regions:
        return audited_regions
    return None

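A standalone sketch may help make the ARN slicing in `get_checks_from_input_arn` above concrete: the service name sits at colon-index 2 and the resource type (sub-service) at the start of index 5, as in `arn:partition:service:region:account:resource-type/id`. The helper name `parse_arn_service` is hypothetical; the renames mirror the branches in the code above.

```python
def parse_arn_service(resource_arn: str) -> tuple[str, str]:
    # Split the ARN on ":" and pick out the service and sub-service fields.
    parts = resource_arn.split(":")
    service = parts[2]
    sub_service = parts[5].split("/")[0].replace("-", "_")
    # Same renames as above: ARN service names that differ from check prefixes.
    if service == "lambda":
        service = "awslambda"
    elif service == "elasticloadbalancing":
        service = "elb"
    elif service == "logs":
        service = "cloudwatch"
    return service, sub_service


# Example: an EC2 security group ARN
print(parse_arn_service("arn:aws:ec2:eu-west-1:123456789012:security-group/sg-1234"))
# -> ('ec2', 'security_group')
```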
@@ -1,12 +1,10 @@
import sys

from pydantic import parse_obj_as

from prowler.lib.check.compliance_models import (
    Compliance_Base_Model,
    Compliance_Requirement,
)
from prowler.lib.check.models import Check_Metadata_Model
from prowler.lib.check.models import Check_Report_AWS
from prowler.lib.logger import logger


@@ -64,33 +62,44 @@ def update_checks_metadata_with_compliance(
                # Include the compliance framework for the check
                check_compliance.append(compliance)
        # Create metadata for Manual Control
        manual_check_metadata = {
            "Provider": "aws",
            "CheckID": "manual_check",
            "CheckTitle": "Manual Check",
            "CheckType": [],
            "ServiceName": "",
            "SubServiceName": "",
            "ResourceIdTemplate": "",
            "Severity": "",
            "ResourceType": "",
            "Description": "",
            "Risk": "",
            "RelatedUrl": "",
        manual_check_metadata = """{
            "Provider" : "aws",
            "CheckID" : "manual_check",
            "CheckTitle" : "Manual Check",
            "CheckType" : [],
            "ServiceName" : "",
            "SubServiceName" : "",
            "ResourceIdTemplate" : "",
            "Severity" : "",
            "ResourceType" : "",
            "Description" : "",
            "Risk" : "",
            "RelatedUrl" : "",
            "Remediation": {
                "Code": {"CLI": "", "NativeIaC": "", "Other": "", "Terraform": ""},
                "Recommendation": {"Text": "", "Url": ""},
                "Code": {
                    "CLI": "",
                    "NativeIaC": "",
                    "Other": "",
                    "Terraform": ""
                },
                "Recommendation": {
                    "Text": "",
                    "Url": ""
                }
            },
            "Categories": [],
            "Tags": {},
            "DependsOn": [],
            "RelatedTo": [],
            "Notes": "",
        }
        manual_check = parse_obj_as(Check_Metadata_Model, manual_check_metadata)
            "Categories" : [],
            "Tags" : {},
            "DependsOn" : [],
            "RelatedTo" : [],
            "Notes" : ""
        }"""
        manual_check = Check_Report_AWS(manual_check_metadata)
        manual_check.status = "INFO"
        manual_check.status_extended = "Manual check"
        manual_check.resource_id = "manual_check"
        manual_check.Compliance = check_compliance
        # Save it into the check's metadata
        bulk_checks_metadata["manual_check"] = manual_check
        bulk_checks_metadata["manual_check"].Compliance = check_compliance

        return bulk_checks_metadata
    except Exception as e:

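One side of the hunk above builds the manual-check metadata as a plain dict and validates it with pydantic's `parse_obj_as`, while the other passes a JSON string straight to `Check_Report_AWS`. A minimal sketch of the `parse_obj_as` pattern, using a hypothetical, much smaller model than `Check_Metadata_Model` (pydantic v1 API, which this codebase imports):

```python
from pydantic import BaseModel, parse_obj_as


class CheckMeta(BaseModel):  # hypothetical stand-in for Check_Metadata_Model
    Provider: str
    CheckID: str
    CheckTitle: str


# parse_obj_as validates a plain dict against the model in one call and
# raises a ValidationError if a field is missing or has the wrong type.
meta = parse_obj_as(
    CheckMeta,
    {"Provider": "aws", "CheckID": "manual_check", "CheckTitle": "Manual Check"},
)
print(meta.CheckID)  # manual_check
```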
@@ -48,6 +48,7 @@ class Check_Metadata_Model(BaseModel):
    RelatedUrl: str
    Remediation: Remediation
    Categories: list[str]
    Tags: dict
    DependsOn: list[str]
    RelatedTo: list[str]
    Notes: str
@@ -128,23 +129,6 @@ class Check_Report_Azure(Check_Report):
        self.subscription = ""


@dataclass
class Check_Report_GCP(Check_Report):
    """Contains the GCP Check's finding information."""

    resource_name: str
    resource_id: str
    project_id: str
    location: str

    def __init__(self, metadata):
        super().__init__(metadata)
        self.resource_name = ""
        self.resource_id = ""
        self.project_id = ""
        self.location = ""


# Testing Pending
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
    """load_check_metadata loads and parse a Check's metadata file"""

@@ -4,8 +4,8 @@ from argparse import RawTextHelpFormatter

from prowler.config.config import (
    available_compliance_frameworks,
    check_current_version,
    default_output_directory,
    prowler_version,
)
from prowler.providers.aws.aws_provider import get_aws_available_regions
from prowler.providers.aws.lib.arn.arn import is_valid_arn
@@ -35,7 +35,8 @@ Detailed documentation at https://docs.prowler.cloud
        self.parser.add_argument(
            "-v",
            "--version",
            action="store_true",
            action="version",
            version=f"Prowler {prowler_version}",
            help="show Prowler version",
        )
        # Common arguments parser
@@ -56,7 +57,6 @@ Detailed documentation at https://docs.prowler.cloud
        # Init Providers Arguments
        self.__init_aws_parser__()
        self.__init_azure_parser__()
        self.__init_gcp_parser__()

    def parse(self, args=None) -> argparse.Namespace:
        """
@@ -66,10 +66,6 @@ Detailed documentation at https://docs.prowler.cloud
        if args:
            sys.argv = args

        if len(sys.argv) == 2 and sys.argv[1] in ("-v", "--version"):
            print(check_current_version())
            sys.exit(0)

        # Set AWS as the default provider if no provider is supplied
        if len(sys.argv) == 1:
            sys.argv = self.__set_default_provider__(sys.argv)
@@ -154,11 +150,6 @@ Detailed documentation at https://docs.prowler.cloud
        common_outputs_parser.add_argument(
            "-b", "--no-banner", action="store_true", help="Hide Prowler banner"
        )
        common_outputs_parser.add_argument(
            "--slack",
            action="store_true",
            help="Send a summary of the execution with a Slack APP in your channel. Environment variables SLACK_API_TOKEN and SLACK_CHANNEL_ID are required (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack).",
        )

    def __init_logging_parser__(self):
        # Logging Options
@@ -233,12 +224,6 @@ Detailed documentation at https://docs.prowler.cloud
            default=[],
            # Pending validate choices
        )
        common_checks_parser.add_argument(
            "-x",
            "--checks-folder",
            nargs="?",
            help="Specify external directory with custom checks (each check must have a folder with the required files, see more in https://docs.prowler.cloud/en/latest/tutorials/misc/#custom-checks).",
        )

    def __init_list_checks_parser__(self):
        # List checks options
@@ -445,18 +430,3 @@ Detailed documentation at https://docs.prowler.cloud
            default=[],
            help="Azure subscription ids to be scanned by prowler",
        )

    def __init_gcp_parser__(self):
        """Init the GCP Provider CLI parser"""
        gcp_parser = self.subparsers.add_parser(
            "gcp", parents=[self.common_providers_parser], help="GCP Provider"
        )
        # Authentication Modes
        gcp_auth_subparser = gcp_parser.add_argument_group("Authentication Modes")
        gcp_auth_modes_group = gcp_auth_subparser.add_mutually_exclusive_group()
        gcp_auth_modes_group.add_argument(
            "--credentials-file",
            nargs="?",
            metavar="FILE_PATH",
            help="Authenticate using a Google Service Account Application Credentials JSON file",
        )

@@ -5,7 +5,6 @@ from colorama import Fore, Style
from tabulate import tabulate

from prowler.config.config import orange_color, timestamp
from prowler.lib.check.models import Check_Report
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
    Check_Output_CSV_CIS,
@@ -19,13 +18,7 @@ def add_manual_controls(output_options, audit_info, file_descriptors):
    try:
        # Check if MANUAL control was already added to output
        if "manual_check" in output_options.bulk_checks_metadata:
            manual_finding = Check_Report(
                output_options.bulk_checks_metadata["manual_check"].json()
            )
            manual_finding.status = "INFO"
            manual_finding.status_extended = "Manual check"
            manual_finding.resource_id = "manual_check"
            manual_finding.region = ""
            manual_finding = output_options.bulk_checks_metadata["manual_check"]
            fill_compliance(
                output_options, manual_finding, audit_info, file_descriptors
            )

@@ -16,13 +16,11 @@ from prowler.lib.outputs.models import (
    Check_Output_CSV_CIS,
    Check_Output_CSV_ENS_RD2022,
    Check_Output_CSV_Generic_Compliance,
    Gcp_Check_Output_CSV,
    generate_csv_fields,
)
from prowler.lib.utils.utils import file_exists, open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info


def initialize_file_descriptor(
@@ -84,13 +82,6 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
                    audit_info,
                    Azure_Check_Output_CSV,
                )
            if isinstance(audit_info, GCP_Audit_Info):
                file_descriptor = initialize_file_descriptor(
                    filename,
                    output_mode,
                    audit_info,
                    Gcp_Check_Output_CSV,
                )
            file_descriptors.update({output_mode: file_descriptor})

        elif output_mode == "json":
@@ -100,13 +91,6 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
            )
            file_descriptors.update({output_mode: file_descriptor})

        elif output_mode == "html":
            filename = f"{output_directory}/{output_filename}{html_file_suffix}"
            file_descriptor = initialize_file_descriptor(
                filename, output_mode, audit_info
            )
            file_descriptors.update({output_mode: file_descriptor})

        elif isinstance(audit_info, AWS_Audit_Info):
            if output_mode == "json-asff":
                filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
@@ -115,6 +99,15 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
                )
                file_descriptors.update({output_mode: file_descriptor})

            elif output_mode == "html":
                filename = (
                    f"{output_directory}/{output_filename}{html_file_suffix}"
                )
                file_descriptor = initialize_file_descriptor(
                    filename, output_mode, audit_info
                )
                file_descriptors.update({output_mode: file_descriptor})

            elif output_mode == "ens_rd2022_aws":
                filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
                file_descriptor = initialize_file_descriptor(

@@ -1,4 +1,3 @@
import importlib
import sys
from os import path

@@ -9,22 +8,20 @@ from prowler.config.config import (
    prowler_version,
    timestamp,
)
from prowler.lib.check.models import Check_Report_AWS, Check_Report_GCP
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
    get_check_compliance,
    parse_html_string,
    unroll_dict,
    unroll_tags,
)
from prowler.lib.utils.utils import open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info


def add_html_header(file_descriptor, audit_info):
    try:
        if not audit_info.profile:
            audit_info.profile = "ENV"
        if isinstance(audit_info.audited_regions, list):
            audited_regions = " ".join(audit_info.audited_regions)
        elif not audit_info.audited_regions:
            audited_regions = "All Regions"
        else:
            audited_regions = audit_info.audited_regions
        file_descriptor.write(
            """
<!DOCTYPE html>
@@ -111,9 +108,51 @@ def add_html_header(file_descriptor, audit_info):
                </li>
            </ul>
        </div>
    </div> """
            + get_assessment_summary(audit_info)
    </div>
    <div class="col-md-2">
        <div class="card">
            <div class="card-header">
                AWS Assessment Summary
            </div>
            <ul class="list-group list-group-flush">
                <li class="list-group-item">
                    <b>AWS Account:</b> """
            + audit_info.audited_account
            + """
                </li>
                <li class="list-group-item">
                    <b>AWS-CLI Profile:</b> """
            + audit_info.profile
            + """
                </li>
                <li class="list-group-item">
                    <b>Audited Regions:</b> """
            + audited_regions
            + """
                </li>
            </ul>
        </div>
    </div>
    <div class="col-md-4">
        <div class="card">
            <div class="card-header">
                AWS Credentials
            </div>
            <ul class="list-group list-group-flush">
                <li class="list-group-item">
                    <b>User Id:</b> """
            + audit_info.audited_user_id
            + """
                </li>
                <li class="list-group-item">
                    <b>Caller Identity ARN:</b>
                    """
            + audit_info.audited_identity_arn
            + """
                </li>
            </ul>
        </div>
    </div>
    <div class="col-md-2">
        <div class="card">
            <div class="card-header">
@@ -144,60 +183,53 @@ def add_html_header(file_descriptor, audit_info):
                <tr>
                    <th scope="col">Status</th>
                    <th scope="col">Severity</th>
                    <th scope="col">Service Name</th>
                    <th style="width:5%" scope="col">Service Name</th>
                    <th scope="col">Region</th>
                    <th style="width:20%" scope="col">Check ID</th>
                    <th style="width:20%" scope="col">Check Title</th>
                    <th scope="col">Resource ID</th>
                    <th scope="col">Resource Tags</th>
                    <th style="width:15%" scope="col">Check Description</th>
                    <th scope="col">Check ID</th>
                    <th scope="col">Status Extended</th>
                    <th scope="col">Risk</th>
<th scope="col">Recomendation</th>
|
||||
<th scope="col">Compliance</th>
|
||||
<th style="width:5%" scope="col">Recomendation URL</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
"""
|
||||
)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def fill_html(file_descriptor, finding, output_options):
|
||||
try:
|
||||
row_class = "p-3 mb-2 bg-success-custom"
|
||||
if finding.status == "INFO":
|
||||
row_class = "table-info"
|
||||
elif finding.status == "FAIL":
|
||||
row_class = "table-danger"
|
||||
elif finding.status == "WARNING":
|
||||
row_class = "table-warning"
|
||||
file_descriptor.write(
|
||||
f"""
|
||||
<tr class="{row_class}">
|
||||
<td>{finding.status}</td>
|
||||
<td>{finding.check_metadata.Severity}</td>
|
||||
<td>{finding.check_metadata.ServiceName}</td>
|
||||
<td>{finding.location if isinstance(finding, Check_Report_GCP) else finding.region if isinstance(finding, Check_Report_AWS) else ""}</td>
|
||||
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
|
||||
<td>{finding.check_metadata.CheckTitle}</td>
|
||||
<td>{finding.resource_id.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>
|
||||
<td>{finding.status_extended.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
|
||||
<td><p class="show-read-more">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>
|
||||
</tr>
|
||||
"""
|
||||
)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
def fill_html(file_descriptor, finding):
|
||||
row_class = "p-3 mb-2 bg-success-custom"
|
||||
if finding.status == "INFO":
|
||||
row_class = "table-info"
|
||||
elif finding.status == "FAIL":
|
||||
row_class = "table-danger"
|
||||
elif finding.status == "WARNING":
|
||||
row_class = "table-warning"
|
||||
file_descriptor.write(
|
||||
f"""
|
||||
<tr class="{row_class}">
|
||||
<td>{finding.status}</td>
|
||||
<td>{finding.check_metadata.Severity}</td>
|
||||
<td>{finding.check_metadata.ServiceName}</td>
|
||||
<td>{finding.region}</td>
|
||||
<td>{finding.check_metadata.CheckTitle}</td>
|
||||
<td>{finding.resource_id.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td>{finding.check_metadata.Description}</td>
|
||||
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
|
||||
<td>{finding.status_extended.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p></td>
|
||||
<td><a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
|
||||
</tr>
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
def fill_html_overview_statistics(stats, output_filename, output_directory):
|
||||
@@ -333,207 +365,3 @@ def add_html_footer(output_filename, output_directory):
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_aws_html_assessment_summary(audit_info):
|
||||
try:
|
||||
if isinstance(audit_info, AWS_Audit_Info):
|
||||
if not audit_info.profile:
|
||||
audit_info.profile = "ENV"
|
||||
if isinstance(audit_info.audited_regions, list):
|
||||
audited_regions = " ".join(audit_info.audited_regions)
|
||||
elif not audit_info.audited_regions:
|
||||
audited_regions = "All Regions"
|
||||
else:
|
||||
audited_regions = audit_info.audited_regions
|
||||
return (
|
||||
"""
|
||||
<div class="col-md-2">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
AWS Assessment Summary
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>AWS Account:</b> """
|
||||
+ audit_info.audited_account
|
||||
+ """
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>AWS-CLI Profile:</b> """
|
||||
+ audit_info.profile
|
||||
+ """
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Audited Regions:</b> """
|
||||
+ audited_regions
|
||||
+ """
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
AWS Credentials
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>User Id:</b> """
|
||||
+ audit_info.audited_user_id
|
||||
+ """
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Caller Identity ARN:</b> """
|
||||
+ audit_info.audited_identity_arn
|
||||
+ """
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_azure_html_assessment_summary(audit_info):
|
||||
try:
|
||||
if isinstance(audit_info, Azure_Audit_Info):
|
||||
printed_subscriptions = []
|
||||
for key, value in audit_info.identity.subscriptions.items():
|
||||
intermediate = key + " : " + value
|
||||
printed_subscriptions.append(intermediate)
|
||||
return (
|
||||
"""
|
||||
<div class="col-md-2">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Azure Assessment Summary
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>Azure Tenant IDs:</b> """
|
||||
+ " ".join(audit_info.identity.tenant_ids)
|
||||
+ """
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Azure Tenant Domain:</b> """
|
||||
+ audit_info.identity.domain
|
||||
+ """
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Azure Subscriptions:</b> """
|
||||
+ " ".join(printed_subscriptions)
|
||||
+ """
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Azure Credentials
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>Azure Identity Type:</b> """
|
||||
+ audit_info.identity.identity_type
|
||||
+ """
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Azure Identity ID:</b> """
|
||||
+ audit_info.identity.identity_id
|
||||
+ """
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
"""
|
||||
)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_gcp_html_assessment_summary(audit_info):
    try:
        if isinstance(audit_info, GCP_Audit_Info):
            try:
                getattr(audit_info.credentials, "_service_account_email")
                profile = (
                    audit_info.credentials._service_account_email
                    if audit_info.credentials._service_account_email is not None
                    else "default"
                )
            except AttributeError:
                profile = "default"
            return (
                """
                <div class="col-md-2">
                    <div class="card">
                        <div class="card-header">
                            GCP Assessment Summary
                        </div>
                        <ul class="list-group list-group-flush">
                            <li class="list-group-item">
                                <b>GCP Project ID:</b> """
                + audit_info.project_id
                + """
                            </li>
                        </ul>
                    </div>
                </div>
                <div class="col-md-4">
                    <div class="card">
                        <div class="card-header">
                            GCP Credentials
                        </div>
                        <ul class="list-group list-group-flush">
                            <li class="list-group-item">
                                <b>GCP Account:</b> """
                + profile
                + """
                            </li>
                        </ul>
                    </div>
                </div>
                """
            )
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit(1)


def get_assessment_summary(audit_info):
    """
    get_assessment_summary gets the HTML assessment summary for the provider
    """
    try:
        # This is based on the Provider_Audit_Info class name
        # It is not pretty but useful
        # AWS_Audit_Info --> aws
        # GCP_Audit_Info --> gcp
        # Azure_Audit_Info --> azure
        provider = audit_info.__class__.__name__.split("_")[0].lower()

        # Dynamically get the provider's HTML assessment summary handler
        provider_html_assessment_summary_function = (
            f"get_{provider}_html_assessment_summary"
        )
        return getattr(
            importlib.import_module(__name__), provider_html_assessment_summary_function
        )(audit_info)
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        sys.exit(1)

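The dispatch above hinges on nothing but the audit_info class name. A minimal, self-contained sketch of the same getattr/importlib pattern (the AWS_Audit_Info stand-in and get_aws_summary below are hypothetical, not the real models):

import importlib


class AWS_Audit_Info:  # hypothetical stand-in for the real audit model
    pass


def get_aws_summary(audit_info):
    return "<aws summary>"


def get_summary(audit_info):
    # "AWS_Audit_Info" -> "aws", then resolve get_aws_summary in this module
    provider = audit_info.__class__.__name__.split("_")[0].lower()
    handler = getattr(importlib.import_module(__name__), f"get_{provider}_summary")
    return handler(audit_info)


print(get_summary(AWS_Audit_Info()))  # -> <aws summary>
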
@@ -8,17 +8,11 @@ from prowler.config.config import (
    timestamp_utc,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
    Compliance,
    ProductFields,
    Resource,
    Severity,
    get_check_compliance,
)
from prowler.lib.outputs.models import Compliance, ProductFields, Resource, Severity
from prowler.lib.utils.utils import hash_sha512, open_file


def fill_json_asff(finding_output, audit_info, finding, output_options):
def fill_json_asff(finding_output, audit_info, finding):
    # Check if there are no resources in the finding
    if finding.resource_arn == "":
        if finding.resource_id == "":
@@ -46,22 +40,14 @@ def fill_json_asff(finding_output, audit_info, finding, output_options):
            Region=finding.region,
        )
    ]
    # Iterate for each compliance framework
    compliance_summary = []
    associated_standards = []
    check_compliance = get_check_compliance(finding, "aws", output_options)
    for key, value in check_compliance.items():
        associated_standards.append({"StandardsId": key})
        item = f"{key} {' '.join(value)}"
        if len(item) > 64:
            item = item[0:63]
        compliance_summary.append(item)

    # Check if any Requirement has > 64 characters
    check_types = []
    for type in finding.check_metadata.CheckType:
        check_types.extend(type.split("/"))
    # Add ED to PASS or FAIL (PASSED/FAILED)
    finding_output.Compliance = Compliance(
        Status=finding.status + "ED",
        AssociatedStandards=associated_standards,
        RelatedRequirements=compliance_summary,
        RelatedRequirements=check_types,
    )
    finding_output.Remediation = {
        "Recommendation": finding.check_metadata.Remediation.Recommendation

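The removed compliance branch caps each RelatedRequirements entry before handing it to ASFF; a standalone trace of that capping logic (note the slice keeps 63 characters, one under the 64-character bound it tests for):

def summarize_requirement(framework: str, requirements: list) -> str:
    # Mirrors the removed lines: join the requirement ids, then cap the length
    item = f"{framework} {' '.join(requirements)}"
    if len(item) > 64:
        item = item[0:63]
    return item


print(summarize_requirement("CIS-1.5", ["2.1.1", "2.1.2"]))  # CIS-1.5 2.1.1 2.1.2
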
@@ -11,37 +11,13 @@ from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info


def get_check_compliance(finding, provider, output_options):
    try:
        check_compliance = {}
        # We have to retrieve all the check's compliance requirements
        if finding.check_metadata.CheckID in output_options.bulk_checks_metadata:
            for compliance in output_options.bulk_checks_metadata[
                finding.check_metadata.CheckID
            ].Compliance:
                compliance_fw = compliance.Framework
                if compliance.Version:
                    compliance_fw = f"{compliance_fw}-{compliance.Version}"
                if compliance.Provider == provider.upper():
                    if compliance_fw not in check_compliance:
                        check_compliance[compliance_fw] = []
                    for requirement in compliance.Requirements:
                        check_compliance[compliance_fw].append(requirement.Id)
        return check_compliance
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit(1)

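For orientation, the mapping built above keys each framework as "<Framework>-<Version>" and collects requirement ids; a hypothetical result for a single check (framework names and ids are illustrative):

check_compliance = {
    "CIS-1.5": ["2.1.1", "2.1.2"],  # framework keyed with its version suffix
    "ENS-RD2022": ["mp.s3.1"],      # requirement ids vary per framework
}
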
def generate_provider_output_csv(
    provider: str, finding, audit_info, mode: str, fd, output_options
):
def generate_provider_output_csv(provider: str, finding, audit_info, mode: str, fd):
    """
    generate_provider_output_csv configures automatically the outputs based on the selected provider and returns the Provider_Output_Options object.
    """
    try:
        finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
        output_model = getattr(importlib.import_module(__name__), finding_output_model)
        # Dynamically load the Provider_Output_Options class
        finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
        output_model = getattr(importlib.import_module(__name__), finding_output_model)
@@ -56,22 +32,6 @@
            data[
                "finding_unique_id"
            ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
            data["compliance"] = unroll_dict(
                get_check_compliance(finding, provider, output_options)
            )
            finding_output = output_model(**data)

        if provider == "gcp":
            data["resource_id"] = finding.resource_id
            data["resource_name"] = finding.resource_name
            data["project_id"] = finding.project_id
            data["location"] = finding.location
            data[
                "finding_unique_id"
            ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
            data["compliance"] = unroll_dict(
                get_check_compliance(finding, provider, output_options)
            )
            finding_output = output_model(**data)

        if provider == "aws":
@@ -83,9 +43,6 @@
            data[
                "finding_unique_id"
            ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
            data["compliance"] = unroll_dict(
                get_check_compliance(finding, provider, output_options)
            )
            finding_output = output_model(**data)

            if audit_info.organizations_metadata:
@@ -134,7 +91,7 @@ def fill_common_data_csv(finding: dict) -> dict:
        "severity": finding.check_metadata.Severity,
        "resource_type": finding.check_metadata.ResourceType,
        "resource_details": finding.resource_details,
        "resource_tags": unroll_tags(finding.resource_tags),
        "resource_tags": finding.resource_tags,
        "description": finding.check_metadata.Description,
        "risk": finding.check_metadata.Risk,
        "related_url": finding.check_metadata.RelatedUrl,
@@ -156,99 +113,26 @@ def fill_common_data_csv(finding: dict) -> dict:
        "remediation_recommendation_code_other": (
            finding.check_metadata.Remediation.Code.Other
        ),
        "categories": unroll_list(finding.check_metadata.Categories),
        "depends_on": unroll_list(finding.check_metadata.DependsOn),
        "related_to": unroll_list(finding.check_metadata.RelatedTo),
        "categories": __unroll_list__(finding.check_metadata.Categories),
        "depends_on": __unroll_list__(finding.check_metadata.DependsOn),
        "related_to": __unroll_list__(finding.check_metadata.RelatedTo),
        "notes": finding.check_metadata.Notes,
    }
    return data


def unroll_list(listed_items: list):
def __unroll_list__(listed_items: list):
    unrolled_items = ""
    separator = "|"
    if listed_items:
        for item in listed_items:
            if not unrolled_items:
                unrolled_items = f"{item}"
            else:
                unrolled_items = f"{unrolled_items} {separator} {item}"

    return unrolled_items


def unroll_tags(tags: list):
    unrolled_items = ""
    separator = "|"
    if tags and tags != [{}] and tags != [None]:
        for item in tags:
            # Check if there are tags in list
            if type(item) == dict:
                for key, value in item.items():
                    if not unrolled_items:
                        # Check the pattern of tags (Key:Value or Key:key/Value:value)
                        if "Key" != key and "Value" != key:
                            unrolled_items = f"{key}={value}"
                        else:
                            if "Key" == key:
                                unrolled_items = f"{value}="
                            else:
                                unrolled_items = f"{value}"
                    else:
                        if "Key" != key and "Value" != key:
                            unrolled_items = (
                                f"{unrolled_items} {separator} {key}={value}"
                            )
                        else:
                            if "Key" == key:
                                unrolled_items = (
                                    f"{unrolled_items} {separator} {value}="
                                )
                            else:
                                unrolled_items = f"{unrolled_items}{value}"
            elif not unrolled_items:
                unrolled_items = f"{item}"
            else:
                unrolled_items = f"{unrolled_items} {separator} {item}"

    return unrolled_items


def unroll_dict(dict: dict):
    unrolled_items = ""
    separator = "|"
    for key, value in dict.items():
        if type(value) == list:
            value = ", ".join(value)
    for item in listed_items:
        if not unrolled_items:
            unrolled_items = f"{key}: {value}"
            unrolled_items = f"{item}"
        else:
            unrolled_items = f"{unrolled_items} {separator} {key}: {value}"
            unrolled_items = f"{unrolled_items}{separator}{item}"

    return unrolled_items

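Given the unroll_tags above, AWS-style Key/Value pairs collapse into key=value entries joined by the "|" separator; a quick trace with sample tags:

print(unroll_tags([{"Key": "env", "Value": "prod"}, {"Key": "team", "Value": "sec"}]))
# env=prod | team=sec
print(unroll_tags([{"env": "prod"}]))
# env=prod
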
def parse_html_string(str: str):
    string = ""
    for elem in str.split(" | "):
        if elem:
            string += f"\n•{elem}\n"

    return string


def parse_json_tags(tags: list):
    dict_tags = {}
    if tags and tags != [{}] and tags != [None]:
        for tag in tags:
            if "Key" in tag and "Value" in tag:
                dict_tags[tag["Key"]] = tag["Value"]
            else:
                dict_tags.update(tag)

    return dict_tags

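parse_json_tags normalizes both tag shapes into one plain dict; sample inputs:

print(parse_json_tags([{"Key": "env", "Value": "prod"}]))  # {'env': 'prod'}
print(parse_json_tags([{"owner": "sec-team"}]))            # {'owner': 'sec-team'}
print(parse_json_tags([]))                                 # {}
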
def generate_csv_fields(format: Any) -> list[str]:
    """Generates the CSV headers for the given class"""
    csv_fields = []
@@ -278,7 +162,7 @@ class Check_Output_CSV(BaseModel):
    severity: str
    resource_type: str
    resource_details: str
    resource_tags: str
    resource_tags: list
    description: str
    risk: str
    related_url: str
@@ -288,7 +172,6 @@ class Check_Output_CSV(BaseModel):
    remediation_recommendation_code_terraform: str
    remediation_recommendation_code_cli: str
    remediation_recommendation_code_other: str
    compliance: str
    categories: str
    depends_on: str
    related_to: str
@@ -323,20 +206,7 @@ class Azure_Check_Output_CSV(Check_Output_CSV):
    resource_name: str = ""


class Gcp_Check_Output_CSV(Check_Output_CSV):
    """
    Gcp_Check_Output_CSV generates a finding's output in CSV format for the GCP provider.
    """

    project_id: str = ""
    location: str = ""
    resource_id: str = ""
    resource_name: str = ""


def generate_provider_output_json(
    provider: str, finding, audit_info, mode: str, output_options
):
def generate_provider_output_json(provider: str, finding, audit_info, mode: str, fd):
    """
    generate_provider_output_json configures automatically the outputs based on the selected provider and returns the Check_Output_JSON object.
    """
@@ -358,19 +228,6 @@ def generate_provider_output_json(
            finding_output.ResourceId = finding.resource_id
            finding_output.ResourceName = finding.resource_name
            finding_output.FindingUniqueId = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
            finding_output.Compliance = get_check_compliance(
                finding, provider, output_options
            )

        if provider == "gcp":
            finding_output.ProjectId = audit_info.project_id
            finding_output.Location = finding.location
            finding_output.ResourceId = finding.resource_id
            finding_output.ResourceName = finding.resource_name
            finding_output.FindingUniqueId = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
            finding_output.Compliance = get_check_compliance(
                finding, provider, output_options
            )

        if provider == "aws":
            finding_output.Profile = audit_info.profile
@@ -378,11 +235,7 @@ def generate_provider_output_json(
            finding_output.Region = finding.region
            finding_output.ResourceId = finding.resource_id
            finding_output.ResourceArn = finding.resource_arn
            finding_output.ResourceTags = parse_json_tags(finding.resource_tags)
            finding_output.FindingUniqueId = f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
            finding_output.Compliance = get_check_compliance(
                finding, provider, output_options
            )

            if audit_info.organizations_metadata:
                finding_output.OrganizationsInfo = (
@@ -418,11 +271,11 @@ class Check_Output_JSON(BaseModel):
    Severity: str
    ResourceType: str
    ResourceDetails: str = ""
    Tags: dict
    Description: str
    Risk: str
    RelatedUrl: str
    Remediation: Remediation
    Compliance: Optional[dict]
    Categories: List[str]
    DependsOn: List[str]
    RelatedTo: List[str]
@@ -440,7 +293,6 @@ class Aws_Check_Output_JSON(Check_Output_JSON):
    Region: str = ""
    ResourceId: str = ""
    ResourceArn: str = ""
    ResourceTags: list = []

    def __init__(self, **metadata):
        super().__init__(**metadata)
@@ -448,7 +300,7 @@ class Aws_Check_Output_JSON(Check_Output_JSON):

class Azure_Check_Output_JSON(Check_Output_JSON):
    """
    Azure_Check_Output_JSON generates a finding's output in JSON format for the Azure provider.
    Aws_Check_Output_JSON generates a finding's output in JSON format for the AWS provider.
    """

    Tenant_Domain: str = ""
@@ -460,20 +312,6 @@ class Azure_Check_Output_JSON(Check_Output_JSON):
        super().__init__(**metadata)


class Gcp_Check_Output_JSON(Check_Output_JSON):
    """
    Gcp_Check_Output_JSON generates a finding's output in JSON format for the GCP provider.
    """

    ProjectId: str = ""
    ResourceId: str = ""
    ResourceName: str = ""
    Location: str = ""

    def __init__(self, **metadata):
        super().__init__(**metadata)


class Check_Output_CSV_ENS_RD2022(BaseModel):
    """
    Check_Output_CSV_ENS_RD2022 generates a finding's output in CSV ENS RD2022 format.
@@ -571,7 +409,6 @@ class Resource(BaseModel):
class Compliance(BaseModel):
    Status: str
    RelatedRequirements: List[str]
    AssociatedStandards: List[dict]


class Check_Output_JSON_ASFF(BaseModel):

@@ -20,7 +20,6 @@ from prowler.lib.outputs.models import (
    Check_Output_JSON_ASFF,
    generate_provider_output_csv,
    generate_provider_output_json,
    unroll_tags,
)
from prowler.providers.aws.lib.allowlist.allowlist import is_allowlisted
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
@@ -33,8 +32,6 @@ def stdout_report(finding, color, verbose, is_quiet):
        details = finding.region
    if finding.check_metadata.Provider == "azure":
        details = finding.check_metadata.ServiceName
    if finding.check_metadata.Provider == "gcp":
        details = finding.location

    if verbose and not (is_quiet and finding.status != "FAIL"):
        print(
@@ -73,7 +70,6 @@ def report(check_findings, output_options, audit_info):
                        finding.check_metadata.CheckID,
                        finding.region,
                        finding.resource_id,
                        unroll_tags(finding.resource_tags),
                    ):
                        finding.status = "WARNING"
            # Print findings by stdout
@@ -104,11 +100,13 @@ def report(check_findings, output_options, audit_info):
                            file_descriptors,
                        )

                    if "html" in file_descriptors:
                        fill_html(file_descriptors["html"], finding)
                        file_descriptors["html"].write("")

                    if "json-asff" in file_descriptors:
                        finding_output = Check_Output_JSON_ASFF()
                        fill_json_asff(
                            finding_output, audit_info, finding, output_options
                        )
                        fill_json_asff(finding_output, audit_info, finding)

                        json.dump(
                            finding_output.dict(),
@@ -131,10 +129,6 @@ def report(check_findings, output_options, audit_info):
                        )

                # Common outputs
                if "html" in file_descriptors:
                    fill_html(file_descriptors["html"], finding, output_options)
                    file_descriptors["html"].write("")

                if "csv" in file_descriptors:
                    csv_writer, finding_output = generate_provider_output_csv(
                        finding.check_metadata.Provider,
@@ -142,7 +136,6 @@ def report(check_findings, output_options, audit_info):
                        audit_info,
                        "csv",
                        file_descriptors["csv"],
                        output_options,
                    )
                    csv_writer.writerow(finding_output.__dict__)

@@ -152,7 +145,7 @@ def report(check_findings, output_options, audit_info):
                        finding,
                        audit_info,
                        "json",
                        output_options,
                        file_descriptors["json"],
                    )
                    json.dump(
                        finding_output.dict(),
@@ -210,8 +203,6 @@ def send_to_s3_bucket(
            filename = f"{output_filename}{json_asff_file_suffix}"
        elif output_mode == "html":
            filename = f"{output_filename}{html_file_suffix}"
        else:  # Compliance output mode
            filename = f"{output_filename}_{output_mode}{csv_file_suffix}"
        logger.info(f"Sending outputs to S3 bucket {output_bucket}")
        bucket_remote_dir = output_directory
        while "prowler/" in bucket_remote_dir:  # Check if it is not a custom directory

@@ -1,135 +0,0 @@
import sys

from slack_sdk import WebClient

from prowler.config.config import aws_logo, azure_logo, gcp_logo, square_logo_img
from prowler.lib.logger import logger


def send_slack_message(token, channel, stats, provider, audit_info):
    try:
        client = WebClient(token=token)
        identity, logo = create_message_identity(provider, audit_info)
        response = client.chat_postMessage(
            username="Prowler",
            icon_url=square_logo_img,
            channel="#" + channel,
            blocks=create_message_blocks(identity, logo, stats),
        )
        return response
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )


def create_message_identity(provider, audit_info):
    try:
        identity = ""
        logo = aws_logo
        if provider == "aws":
            identity = f"AWS Account *{audit_info.audited_account}*"
        elif provider == "gcp":
            identity = f"GCP Project *{audit_info.project_id}*"
            logo = gcp_logo
        elif provider == "azure":
            printed_subscriptions = []
            for key, value in audit_info.identity.subscriptions.items():
                intermediate = "- *" + key + ": " + value + "*\n"
                printed_subscriptions.append(intermediate)
            identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
            logo = azure_logo
        return identity, logo
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )


def create_message_blocks(identity, logo, stats):
    try:
        blocks = [
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": f"Hey there 👋 \n I'm *Prowler*, _the handy cloud security tool_ :cloud::key:\n\n I have just finished the security assessment on your {identity} with a total of *{stats['findings_count']}* findings.",
                },
                "accessory": {
                    "type": "image",
                    "image_url": logo,
                    "alt_text": "Provider Logo",
                },
            },
            {"type": "divider"},
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass']/stats['findings_count']*100,2)}%)\n",
                },
            },
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail']/stats['findings_count']*100,2)}%)\n ",
                },
            },
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": f"\n:bar_chart: *{stats['resources_count']} Scanned Resources*\n",
                },
            },
            {"type": "divider"},
            {
                "type": "context",
                "elements": [
                    {
                        "type": "mrkdwn",
                        "text": f"Used parameters: `prowler {' '.join(sys.argv[1:])} `",
                    }
                ],
            },
            {"type": "divider"},
            {
                "type": "section",
                "text": {"type": "mrkdwn", "text": "Join our Slack Community!"},
                "accessory": {
                    "type": "button",
                    "text": {"type": "plain_text", "text": "Prowler :slack:"},
                    "url": "https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog",
                },
            },
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": "Feel free to contact us in our repo",
                },
                "accessory": {
                    "type": "button",
                    "text": {"type": "plain_text", "text": "Prowler :github:"},
                    "url": "https://github.com/prowler-cloud/prowler",
                },
            },
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": "See all the things you can do with ProwlerPro",
                },
                "accessory": {
                    "type": "button",
                    "text": {"type": "plain_text", "text": "Prowler Pro"},
                    "url": "https://prowler.pro",
                },
            },
        ]
        return blocks
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )

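The percentages in the removed blocks divide by stats['findings_count']; a minimal, hypothetical stats payload satisfying create_message_blocks (an empty scan would need guarding upstream to avoid a ZeroDivisionError):

stats = {"findings_count": 40, "total_pass": 30, "total_fail": 10, "resources_count": 25}
print(round(stats["total_pass"] / stats["findings_count"] * 100, 2))  # 75.0
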
@@ -20,18 +20,12 @@ def display_summary_table(
        entity_type = "Account"
        audited_entities = audit_info.audited_account
    elif provider == "azure":
        if (
            audit_info.identity.domain
            != "Unknown tenant domain (missing AAD permissions)"
        ):
        if audit_info.identity.domain:
            entity_type = "Tenant Domain"
            audited_entities = audit_info.identity.domain
        else:
            entity_type = "Tenant ID/s"
            audited_entities = " ".join(audit_info.identity.tenant_ids)
    elif provider == "gcp":
        entity_type = "Project ID"
        audited_entities = audit_info.project_id

    if findings:
        current = {
@@ -59,6 +53,7 @@
                    current["Service"] != finding.check_metadata.ServiceName
                    and current["Service"]
                ):

                    add_service_to_table(findings_table, current)

                    current["Total"] = current["Critical"] = current["High"] = current[

@@ -1,26 +1,16 @@
import json
import os
import sys
import tempfile
from hashlib import sha512
from io import TextIOWrapper
from os.path import exists
from typing import Any

from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings

from prowler.lib.logger import logger


def open_file(input_file: str, mode: str = "r") -> TextIOWrapper:
    try:
        f = open(input_file, mode)
    except OSError:
        logger.critical(
            "Ooops! You reached your user session maximum open files. To solve this issue, increase the shell session limit by running this command `ulimit -n 4096`. For more info visit https://docs.prowler.cloud/en/latest/troubleshooting/"
        )
        sys.exit(1)
    except Exception as e:
        logger.critical(
            f"{input_file}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]"
@@ -59,20 +49,3 @@ def file_exists(filename: str):
# create sha512 hash for string
def hash_sha512(string: str) -> str:
    return sha512(string.encode("utf-8")).hexdigest()[0:9]


def detect_secrets_scan(data):
    temp_data_file = tempfile.NamedTemporaryFile(delete=False)
    temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
    temp_data_file.close()

    secrets = SecretsCollection()
    with default_settings():
        secrets.scan_file(temp_data_file.name)
    os.remove(temp_data_file.name)

    detect_secrets_output = secrets.json()
    if detect_secrets_output:
        return detect_secrets_output[temp_data_file.name]
    else:
        return None

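detect_secrets_scan round-trips the payload through a temporary file because the detect-secrets API scans files, not strings; a quick smoke test (the sample value is made up, and the result may be None if no plugin matches it):

result = detect_secrets_scan("aws_secret_access_key = 'A3T0EXAMPLEEXAMPLE'")
print(result)  # list of detected secrets for the temp file, or None
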
@@ -7,7 +7,6 @@ from botocore.credentials import RefreshableCredentials
from botocore.session import get_session

from prowler.config.config import aws_services_json_file
from prowler.lib.check.check import list_modules, recover_checks_from_service
from prowler.lib.logger import logger
from prowler.lib.utils.utils import open_file, parse_json_file
from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
@@ -36,7 +35,7 @@ class AWS_Provider:
                    secret_key=audit_info.credentials.aws_secret_access_key,
                    token=audit_info.credentials.aws_session_token,
                    expiry_time=audit_info.credentials.expiration,
                    refresh_using=self.refresh_credentials,
                    refresh_using=self.refresh,
                    method="sts-assume-role",
                )
                # Here we need the botocore session since it needs to use refreshable credentials
@@ -60,7 +59,7 @@ class AWS_Provider:
    # Refresh credentials method using assume role
    # botocore invokes this method by reference with no arguments, so it cannot accept any
    # https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L570
    def refresh_credentials(self):
    def refresh(self):
        logger.info("Refreshing assumed credentials...")

        response = assume_role(self.aws_session, self.role_info)
@@ -85,7 +84,7 @@ def assume_role(session: session.Session, assumed_role_info: AWS_Assume_Role) ->
    if assumed_role_info.external_id:
        assumed_credentials = sts_client.assume_role(
            RoleArn=assumed_role_info.role_arn,
            RoleSessionName="ProwlerAsessmentSession",
            RoleSessionName="ProwlerProAsessmentSession",
            DurationSeconds=assumed_role_info.session_duration,
            ExternalId=assumed_role_info.external_id,
        )

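For reference, botocore re-invokes the refresh callable shortly before expiry_time and expects four keys back; a sketch of that contract, with placeholder values standing in for the sts.assume_role response used above:

from datetime import datetime, timedelta, timezone


def refresh():
    # Placeholder credentials; the real method maps them from assume_role()
    expiry = datetime.now(timezone.utc) + timedelta(hours=1)
    return {
        "access_key": "ASIAEXAMPLE",      # hypothetical value
        "secret_key": "examplesecret",    # hypothetical value
        "token": "examplesessiontoken",   # hypothetical value
        "expiry_time": expiry.isoformat(),
    }
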
@@ -111,8 +110,8 @@ def generate_regional_clients(
    regional_clients = {}
    # Get json locally
    actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
    with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
        data = parse_json_file(f)
    f = open_file(f"{actual_directory}/{aws_services_json_file}")
    data = parse_json_file(f)
    # Check if it is a subservice
    json_regions = data["services"][service]["regions"][
        audit_info.audited_partition
@@ -145,8 +144,8 @@ def generate_regional_clients(
def get_aws_available_regions():
    try:
        actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
        with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
            data = parse_json_file(f)
        f = open_file(f"{actual_directory}/{aws_services_json_file}")
        data = parse_json_file(f)

        regions = set()
        for service in data["services"].values():
@@ -157,73 +156,3 @@ def get_aws_available_regions():
    except Exception as error:
        logger.error(f"{error.__class__.__name__}: {error}")
        return []

def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
    """get_checks_from_input_arn gets the list of checks from the input ARNs"""
    checks_from_arn = set()
    # Handle if there are audit resources so only their services are executed
    if audit_resources:
        services_without_subservices = ["guardduty", "kms", "s3", "elb"]
        service_list = set()
        sub_service_list = set()
        for resource in audit_resources:
            service = resource.split(":")[2]
            sub_service = resource.split(":")[5].split("/")[0].replace("-", "_")
            # WAF services do not have checks
            if service != "wafv2" and service != "waf":
                # Parse services when they are different in the ARNs
                if service == "lambda":
                    service = "awslambda"
                if service == "elasticloadbalancing":
                    service = "elb"
                elif service == "logs":
                    service = "cloudwatch"
                # Check if Prowler has checks for the service
                try:
                    list_modules(provider, service)
                except ModuleNotFoundError:
                    # Service is not supported
                    pass
                else:
                    service_list.add(service)

                # Get subservices to execute only applicable checks
                if service not in services_without_subservices:
                    # Parse some specific subservices
                    if service == "ec2":
                        if sub_service == "security_group":
                            sub_service = "securitygroup"
                        if sub_service == "network_acl":
                            sub_service = "networkacl"
                        if sub_service == "image":
                            sub_service = "ami"
                    if service == "rds":
                        if sub_service == "cluster_snapshot":
                            sub_service = "snapshot"
                    sub_service_list.add(sub_service)
                else:
                    sub_service_list.add(service)

        checks = recover_checks_from_service(service_list, provider)

        # Filter only checks with audited subservices
        for check in checks:
            if any(sub_service in check for sub_service in sub_service_list):
                if not (sub_service == "policy" and "password_policy" in check):
                    checks_from_arn.add(check)

    # Return final checks list
    return sorted(checks_from_arn)


def get_regions_from_audit_resources(audit_resources: list) -> list:
    """get_regions_from_audit_resources gets the regions from the audit resources ARNs"""
    audited_regions = []
    for resource in audit_resources:
        region = resource.split(":")[3]
        if region and region not in audited_regions:  # Check if the ARN has a region
            audited_regions.append(region)
    if audited_regions:
        return audited_regions
    return None

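Both helpers above rely on ARN field positions; colon-separated index 2 is the service and index 3 the region (empty for global services), e.g.:

arn = "arn:aws:ec2:eu-west-1:123456789012:security-group/sg-0123456789abcdef0"
print(arn.split(":")[2])  # ec2
print(arn.split(":")[3])  # eu-west-1
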
@@ -3,20 +3,12 @@ import sys

import yaml
from boto3.dynamodb.conditions import Attr
from schema import Optional, Schema
from schema import Schema

from prowler.lib.logger import logger

allowlist_schema = Schema(
    {
        "Accounts": {
            str: {
                "Checks": {
                    str: {"Regions": list, "Resources": list, Optional("Tags"): list}
                }
            }
        }
    }
    {"Accounts": {str: {"Checks": {str: {"Regions": list, "Resources": list}}}}}
)

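A hypothetical allowlist document that validates against the newer (tag-aware) side of the schema above; the account, check name, and patterns are illustrative:

allowlist = {
    "Accounts": {
        "*": {  # applies to every audited account
            "Checks": {
                "s3_bucket_public_access": {
                    "Regions": ["*"],
                    "Resources": ["test-bucket.*"],
                    "Tags": ["environment=dev"],  # optional key
                }
            }
        }
    }
}
allowlist_schema.validate(allowlist)  # raises SchemaError if malformed
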
@@ -69,25 +61,14 @@ def parse_allowlist_file(audit_info, allowlist_file):
                    dynamodb_items.update(response["Items"])
                for item in dynamodb_items:
                    # Create allowlist for every item
                    if "Tags" in item:
                        allowlist["Accounts"][item["Accounts"]] = {
                            "Checks": {
                                item["Checks"]: {
                                    "Regions": item["Regions"],
                                    "Resources": item["Resources"],
                                    "Tags": item["Tags"],
                                }
                            }
                        }
                    else:
                        allowlist["Accounts"][item["Accounts"]] = {
                            "Checks": {
                                item["Checks"]: {
                                    "Regions": item["Regions"],
                                    "Resources": item["Resources"],
                                }
                    allowlist["Accounts"][item["Accounts"]] = {
                        "Checks": {
                            item["Checks"]: {
                                "Regions": item["Regions"],
                                "Resources": item["Resources"],
                            }
                        }
                    }
        else:
            with open(allowlist_file) as f:
                allowlist = yaml.safe_load(f)["Allowlist"]
@@ -106,18 +87,18 @@ def parse_allowlist_file(audit_info, allowlist_file):
            sys.exit(1)

def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
def is_allowlisted(allowlist, audited_account, check, region, resource):
    try:
        if audited_account in allowlist["Accounts"]:
            if is_allowlisted_in_check(
                allowlist, audited_account, check, region, resource, tags
                allowlist, audited_account, check, region, resource
            ):
                return True
        # If there is a *, it affects all accounts
        if "*" in allowlist["Accounts"]:
            audited_account = "*"
            if is_allowlisted_in_check(
                allowlist, audited_account, check, region, resource, tags
                allowlist, audited_account, check, region, resource
            ):
                return True
        return False
@@ -128,35 +109,21 @@ def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
        sys.exit(1)


def is_allowlisted_in_check(allowlist, audited_account, check, region, resource, tags):
def is_allowlisted_in_check(allowlist, audited_account, check, region, resource):
    try:
        for allowlisted_check in allowlist["Accounts"][audited_account][
            "Checks"
        ].keys():
            # If there is a *, it affects all checks
            if "*" == allowlisted_check:
                check = "*"
                if is_allowlisted_in_region(
                    allowlist, audited_account, check, region, resource, tags
                ):
                    return True
            # Check if there is the specific check
            elif check == allowlisted_check:
                if is_allowlisted_in_region(
                    allowlist, audited_account, check, region, resource, tags
                ):
                    return True
            # Check if the check is a regex
            elif re.search(allowlisted_check, check):
                if is_allowlisted_in_region(
                    allowlist,
                    audited_account,
                    allowlisted_check,
                    region,
                    resource,
                    tags,
                ):
                    return True
        # If there is a *, it affects all checks
        if "*" in allowlist["Accounts"][audited_account]["Checks"]:
            check = "*"
            if is_allowlisted_in_region(
                allowlist, audited_account, check, region, resource
            ):
                return True
        # Check if there is the specific check
        if check in allowlist["Accounts"][audited_account]["Checks"]:
            if is_allowlisted_in_region(
                allowlist, audited_account, check, region, resource
            ):
                return True
        return False
    except Exception as error:
        logger.critical(
@@ -165,67 +132,30 @@ def is_allowlisted_in_check(allowlist, audited_account, check, region, resource,
        sys.exit(1)


def is_allowlisted_in_region(allowlist, audited_account, check, region, resource, tags):
def is_allowlisted_in_region(allowlist, audited_account, check, region, resource):
    try:
        # If there is a *, it affects all regions
        if "*" in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
            for elem in allowlist["Accounts"][audited_account]["Checks"][check][
                "Resources"
            ]:
                if is_allowlisted_in_tags(
                    allowlist["Accounts"][audited_account]["Checks"][check],
                    elem,
                    resource,
                    tags,
                ):
                    return True
                # Check if it is an *
                if elem == "*":
                    elem = ".*"
                if re.search(elem, resource):
                    return True
        # Check if there is the specific region
        if region in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
            for elem in allowlist["Accounts"][audited_account]["Checks"][check][
                "Resources"
            ]:
                if is_allowlisted_in_tags(
                    allowlist["Accounts"][audited_account]["Checks"][check],
                    elem,
                    resource,
                    tags,
                ):
                    return True
                # Check if it is an *
                if elem == "*":
                    elem = ".*"
                if re.search(elem, resource):
                    return True
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def is_allowlisted_in_tags(check_allowlist, elem, resource, tags):
    try:
        # Check if it is an *
        if elem == "*":
            elem = ".*"
        # Check if there are allowlisted tags
        if "Tags" in check_allowlist:
            # Check if there are resource tags
            if not tags or not re.search(elem, resource):
                return False

            all_allowed_tags_in_resource_tags = True
            for allowed_tag in check_allowlist["Tags"]:
                found_allowed_tag = False
                for resource_tag in tags:
                    if re.search(allowed_tag, resource_tag):
                        found_allowed_tag = True
                        break

                if not found_allowed_tag:
                    all_allowed_tags_in_resource_tags = False

            return all_allowed_tags_in_resource_tags
        else:
            if re.search(elem, resource):
                return True
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)

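A quick trace of the tag-aware branch above (values are illustrative; tags is assumed here to be an iterable of flattened key=value strings):

check_allowlist = {"Tags": ["environment=dev"]}
print(is_allowlisted_in_tags(check_allowlist, "test-.*", "test-bucket", ["environment=dev", "team=sec"]))
# True: the resource matches and every allowlisted tag pattern is present
print(is_allowlisted_in_tags(check_allowlist, "test-.*", "test-bucket", []))
# False: there are no resource tags to match against
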
@@ -1,48 +1,50 @@
import re

from arnparse import arnparse

from prowler.providers.aws.lib.arn.error import (
    RoleArnParsingEmptyResource,
    RoleArnParsingFailedMissingFields,
    RoleArnParsingIAMRegionNotEmpty,
    RoleArnParsingInvalidAccountID,
    RoleArnParsingInvalidResourceType,
    RoleArnParsingPartitionEmpty,
    RoleArnParsingServiceNotIAMnorSTS,
    RoleArnParsingServiceNotIAM,
)
from prowler.providers.aws.lib.arn.models import ARN


def parse_iam_credentials_arn(arn: str) -> ARN:
    arn_parsed = ARN(arn)
    # First check if region is empty (in IAM ARNs region is always empty)
    if arn_parsed.region:
        raise RoleArnParsingIAMRegionNotEmpty
def arn_parsing(arn):
    # Check the number of fields, which must be six
    if len(arn.split(":")) != 6:
        raise RoleArnParsingFailedMissingFields
    else:
        # Check if the needed fields are filled:
        # - partition
        # - service
        # - account_id
        # - resource_type
        # - resource
        if arn_parsed.partition is None or arn_parsed.partition == "":
            raise RoleArnParsingPartitionEmpty
        elif arn_parsed.service != "iam" and arn_parsed.service != "sts":
            raise RoleArnParsingServiceNotIAMnorSTS
        elif (
            arn_parsed.account_id is None
            or len(arn_parsed.account_id) != 12
            or not arn_parsed.account_id.isnumeric()
        ):
            raise RoleArnParsingInvalidAccountID
        elif (
            arn_parsed.resource_type != "role"
            and arn_parsed.resource_type != "user"
            and arn_parsed.resource_type != "assumed-role"
        ):
            raise RoleArnParsingInvalidResourceType
        elif arn_parsed.resource == "":
            raise RoleArnParsingEmptyResource
        arn_parsed = arnparse(arn)
        # First check if region is empty (in IAM arns region is always empty)
        if arn_parsed.region is not None:
            raise RoleArnParsingIAMRegionNotEmpty
        else:
            return arn_parsed
            # Check if the needed fields are filled:
            # - partition
            # - service
            # - account_id
            # - resource_type
            # - resource
            if arn_parsed.partition is None:
                raise RoleArnParsingPartitionEmpty
            elif arn_parsed.service != "iam":
                raise RoleArnParsingServiceNotIAM
            elif (
                arn_parsed.account_id is None
                or len(arn_parsed.account_id) != 12
                or not arn_parsed.account_id.isnumeric()
            ):
                raise RoleArnParsingInvalidAccountID
            elif arn_parsed.resource_type != "role":
                raise RoleArnParsingInvalidResourceType
            elif arn_parsed.resource == "":
                raise RoleArnParsingEmptyResource
            else:
                return arn_parsed


def is_valid_arn(arn: str) -> bool:

@@ -1,49 +1,43 @@
class RoleArnParsingFailedMissingFields(Exception):
    # The ARN contains a number of fields different than six separated by ":"
    # The arn contains a number of fields different than six separated by ":"
    def __init__(self):
        self.message = "The assumed role ARN contains an invalid number of fields separated by : or it does not start by arn, please input a valid ARN"
        self.message = "The assumed role arn contains a number of fields different than six separated by :, please input a valid arn"
        super().__init__(self.message)


class RoleArnParsingIAMRegionNotEmpty(Exception):
    # The ARN contains a non-empty value for region; since it is an IAM ARN it is not valid
    # The arn contains a non-empty value for region; since it is an IAM arn it is not valid
    def __init__(self):
        self.message = "The assumed role ARN contains a non-empty value for region, since it is an IAM ARN it is not valid, please input a valid ARN"
        self.message = "The assumed role arn contains a non-empty value for region, since it is an IAM arn it is not valid, please input a valid arn"
        super().__init__(self.message)


class RoleArnParsingPartitionEmpty(Exception):
    # The ARN contains an empty value for partition
    # The arn contains an empty value for partition
    def __init__(self):
        self.message = "The assumed role ARN does not contain a value for partition, please input a valid ARN"
        self.message = "The assumed role arn does not contain a value for partition, please input a valid arn"
        super().__init__(self.message)


class RoleArnParsingServiceNotIAMnorSTS(Exception):
class RoleArnParsingServiceNotIAM(Exception):
    def __init__(self):
        self.message = "The assumed role ARN contains a value for service distinct than IAM or STS, please input a valid ARN"
        super().__init__(self.message)


class RoleArnParsingServiceNotSTS(Exception):
    def __init__(self):
        self.message = "The assumed role ARN contains a value for service distinct than STS, please input a valid ARN"
        self.message = "The assumed role arn contains a value for service distinct than iam, please input a valid arn"
        super().__init__(self.message)


class RoleArnParsingInvalidAccountID(Exception):
    def __init__(self):
        self.message = "The assumed role ARN contains a value for account id empty or invalid, a valid account id must be composed of 12 numbers, please input a valid ARN"
        self.message = "The assumed role arn contains a value for account id empty or invalid, a valid account id must be composed of 12 numbers, please input a valid arn"
        super().__init__(self.message)


class RoleArnParsingInvalidResourceType(Exception):
    def __init__(self):
        self.message = "The assumed role ARN contains a value for resource type different than role, please input a valid ARN"
        self.message = "The assumed role arn contains a value for resource type different than role, please input a valid arn"
        super().__init__(self.message)


class RoleArnParsingEmptyResource(Exception):
    def __init__(self):
        self.message = "The assumed role ARN does not contain a value for resource, please input a valid ARN"
        self.message = "The assumed role arn does not contain a value for resource, please input a valid arn"
        super().__init__(self.message)

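These exceptions surface directly from parse_iam_credentials_arn on the newer side of the diff; two illustrative inputs (account id and role name are made up):

parse_iam_credentials_arn("arn:aws:iam::123456789012:role/prowler-scan")  # parses fine
parse_iam_credentials_arn("arn:aws:s3:::my-bucket")  # raises RoleArnParsingServiceNotIAMnorSTS
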
@@ -1,57 +0,0 @@
from typing import Optional

from pydantic import BaseModel

from prowler.providers.aws.lib.arn.error import RoleArnParsingFailedMissingFields


class ARN(BaseModel):
    partition: str
    service: str
    region: Optional[str]  # IAM ARNs do not have a region
    account_id: str
    resource: str
    resource_type: str

    def __init__(self, arn):
        # Validate the ARN
        ## Check that the arn starts with "arn:"
        if not arn.startswith("arn:"):
            raise RoleArnParsingFailedMissingFields
        ## Retrieve fields
        arn_elements = arn.split(":", 5)
        data = {
            "partition": arn_elements[1],
            "service": arn_elements[2],
            "region": arn_elements[3] if arn_elements[3] != "" else None,
            "account_id": arn_elements[4],
            "resource": arn_elements[5],
            "resource_type": get_arn_resource_type(arn, arn_elements[2]),
        }
        if "/" in data["resource"]:
            data["resource"] = data["resource"].split("/", 1)[1]
        elif ":" in data["resource"]:
            data["resource"] = data["resource"].split(":", 1)[1]

        # Calls Pydantic's BaseModel __init__
        super().__init__(**data)


def get_arn_resource_type(arn, service):
    if service == "s3":
        resource_type = "bucket"
    elif service == "sns":
        resource_type = "topic"
    elif service == "sqs":
        resource_type = "queue"
    elif service == "apigateway":
        split_parts = arn.split(":")[5].split("/")
        if "integration" in split_parts and "responses" in split_parts:
            resource_type = "restapis-resources-methods-integration-response"
        elif "documentation" in split_parts and "parts" in split_parts:
            resource_type = "restapis-documentation-parts"
        else:
            resource_type = arn.split(":")[5].split("/")[1]
    else:
        resource_type = arn.split(":")[5].split("/")[0]
    return resource_type

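get_arn_resource_type derives the type from the service name or, failing that, from the resource segment; two examples:

print(get_arn_resource_type("arn:aws:s3:::my-bucket", "s3"))  # bucket
print(get_arn_resource_type(
    "arn:aws:ec2:eu-west-1:123456789012:security-group/sg-0a1b2c", "ec2"
))  # security-group
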
@@ -1,59 +0,0 @@
import sys

from boto3 import session
from colorama import Fore, Style

from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info

AWS_STS_GLOBAL_ENDPOINT_REGION = "us-east-1"


def validate_aws_credentials(session: session, input_regions: list) -> dict:
    try:
        # For a valid STS GetCallerIdentity we have to use the right AWS Region
        if input_regions is None or len(input_regions) == 0:
            if session.region_name is not None:
                aws_region = session.region_name
            else:
                # If no region was passed with -f/--region
                # we use the Global STS Endpoint Region, us-east-1
                aws_region = AWS_STS_GLOBAL_ENDPOINT_REGION
        else:
            # Get the first region passed to -f/--region
            aws_region = input_regions[0]
        validate_credentials_client = session.client("sts", aws_region)
        caller_identity = validate_credentials_client.get_caller_identity()
        # Include the region where the caller_identity has validated the credentials
        caller_identity["region"] = aws_region
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        sys.exit(1)
    else:
        return caller_identity


def print_aws_credentials(audit_info: AWS_Audit_Info):
    # Beautify audited regions, set "all" if there is no filter region
    regions = (
        ", ".join(audit_info.audited_regions)
        if audit_info.audited_regions is not None
        else "all"
    )
    # Beautify audited profile, set "default" if there is no profile set
    profile = audit_info.profile if audit_info.profile is not None else "default"

    report = f"""
This report is being generated using the credentials below:

AWS-CLI Profile: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} AWS Filter Region: {Fore.YELLOW}[{regions}]{Style.RESET_ALL}
AWS Account: {Fore.YELLOW}[{audit_info.audited_account}]{Style.RESET_ALL} UserId: {Fore.YELLOW}[{audit_info.audited_user_id}]{Style.RESET_ALL}
Caller Identity ARN: {Fore.YELLOW}[{audit_info.audited_identity_arn}]{Style.RESET_ALL}
"""
    # If -A is set, print the Assumed Role ARN
    if audit_info.assumed_role_info.role_arn is not None:
        report += f"""Assumed Role ARN: {Fore.YELLOW}[{audit_info.assumed_role_info.role_arn}]{Style.RESET_ALL}
"""
    print(report)

@@ -1,40 +0,0 @@
import sys

from boto3 import client

from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info


def get_organizations_metadata(
    metadata_account: str, assumed_credentials: dict
) -> AWS_Organizations_Info:
    try:
        organizations_client = client(
            "organizations",
            aws_access_key_id=assumed_credentials["Credentials"]["AccessKeyId"],
            aws_secret_access_key=assumed_credentials["Credentials"]["SecretAccessKey"],
            aws_session_token=assumed_credentials["Credentials"]["SessionToken"],
        )
        organizations_metadata = organizations_client.describe_account(
            AccountId=metadata_account
        )
        list_tags_for_resource = organizations_client.list_tags_for_resource(
            ResourceId=metadata_account
        )
    except Exception as error:
        logger.critical(f"{error.__class__.__name__} -- {error}")
        sys.exit(1)
    else:
        # Convert the Tags dictionary to a string
        account_details_tags = ""
        for tag in list_tags_for_resource["Tags"]:
            account_details_tags += tag["Key"] + ":" + tag["Value"] + ","
        organizations_info = AWS_Organizations_Info(
            account_details_email=organizations_metadata["Account"]["Email"],
            account_details_name=organizations_metadata["Account"]["Name"],
            account_details_arn=organizations_metadata["Account"]["Arn"],
            account_details_org=organizations_metadata["Account"]["Arn"].split("/")[1],
            account_details_tags=account_details_tags,
        )
        return organizations_info

@@ -1,9 +1,7 @@
|
||||
import csv
|
||||
import json
|
||||
from copy import deepcopy
|
||||
|
||||
from alive_progress import alive_bar
|
||||
from botocore.client import ClientError
|
||||
from colorama import Fore, Style
|
||||
from tabulate import tabulate
|
||||
|
||||
@@ -14,15 +12,14 @@ from prowler.config.config import (
|
||||
output_file_timestamp,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.aws.lib.arn.models import get_arn_resource_type
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
|
||||
|
||||
def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
|
||||
print(
|
||||
f"-=- Running Quick Inventory for AWS Account {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL} -=-\n"
|
||||
)
|
||||
resources = []
|
||||
global_resources = []
|
||||
total_resources_per_region = {}
|
||||
iam_was_scanned = False
|
||||
# If not inputed regions, check all of them
|
||||
if not audit_info.audited_regions:
|
||||
# EC2 client for describing all regions
|
||||
@@ -43,20 +40,42 @@ def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
|
||||
enrich_print=False,
|
||||
) as bar:
|
||||
for region in sorted(audit_info.audited_regions):
|
||||
bar.title = f"Inventorying AWS Account {orange_color}{audit_info.audited_account}{Style.RESET_ALL}"
|
||||
bar.title = f"-> Scanning {orange_color}{region}{Style.RESET_ALL} region"
|
||||
resources_in_region = []
|
||||
# {
|
||||
# eu-west-1: 100,...
|
||||
# }
|
||||
|
||||
try:
|
||||
# Scan IAM only once
|
||||
if not iam_was_scanned:
|
||||
global_resources.extend(get_iam_resources(audit_info.audit_session))
|
||||
iam_was_scanned = True
|
||||
# If us-east-1 get IAM resources from there otherwise see if it is US GovCloud or China
|
||||
iam_client = audit_info.audit_session.client("iam")
|
||||
if (
|
||||
region == "us-east-1"
|
||||
or region == "us-gov-west-1"
|
||||
or region == "cn-north-1"
|
||||
):
|
||||
get_roles_paginator = iam_client.get_paginator("list_roles")
|
||||
for page in get_roles_paginator.paginate():
|
||||
for role in page["Roles"]:
|
||||
# Avoid aws-service-role roles
|
||||
if "aws-service-role" not in role["Arn"]:
|
||||
resources_in_region.append(role["Arn"])
|
||||
|
||||
# Get regional S3 buckets since none-tagged buckets are not supported by the resourcegroupstaggingapi
|
||||
resources_in_region.extend(get_regional_buckets(audit_info, region))
|
||||
get_users_paginator = iam_client.get_paginator("list_users")
|
||||
for page in get_users_paginator.paginate():
|
||||
for user in page["Users"]:
|
||||
resources_in_region.append(user["Arn"])
|
||||
|
||||
get_groups_paginator = iam_client.get_paginator("list_groups")
|
||||
for page in get_groups_paginator.paginate():
|
||||
for group in page["Groups"]:
|
||||
resources_in_region.append(group["Arn"])
|
||||
|
||||
get_policies_paginator = iam_client.get_paginator("list_policies")
|
||||
for page in get_policies_paginator.paginate(Scope="Local"):
|
||||
for policy in page["Policies"]:
|
||||
resources_in_region.append(policy["Arn"])
|
||||
|
||||
for saml_provider in iam_client.list_saml_providers()[
|
||||
"SAMLProviderList"
|
||||
]:
|
||||
resources_in_region.append(saml_provider["Arn"])
|
||||
|
||||
client = audit_info.audit_session.client(
|
||||
"resourcegroupstaggingapi", region_name=region
|
||||
@@ -67,27 +86,12 @@ def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
|
||||
for page in get_resources_paginator.paginate():
|
||||
resources_count += len(page["ResourceTagMappingList"])
|
||||
for resource in page["ResourceTagMappingList"]:
|
||||
# Avoid adding S3 buckets again:
|
||||
if resource["ResourceARN"].split(":")[2] != "s3":
|
||||
# Check if region is not in ARN --> Global service
|
||||
if not resource["ResourceARN"].split(":")[3]:
|
||||
global_resources.append(
|
||||
{
|
||||
"arn": resource["ResourceARN"],
|
||||
"tags": resource["Tags"],
|
||||
}
|
||||
)
|
||||
else:
|
||||
resources_in_region.append(
|
||||
{
|
||||
"arn": resource["ResourceARN"],
|
||||
"tags": resource["Tags"],
|
||||
}
|
||||
)
|
||||
resources_in_region.append(resource["ResourceARN"])
|
||||
bar()
|
||||
if len(resources_in_region) > 0:
|
||||
total_resources_per_region[region] = len(resources_in_region)
|
||||
bar.text = f"-> Found {Fore.GREEN}{len(resources_in_region)}{Style.RESET_ALL} resources in {region}"
|
||||
print(
|
||||
f"Found {Fore.GREEN}{len(resources_in_region)}{Style.RESET_ALL} resources in region {Fore.YELLOW}{region}{Style.RESET_ALL}"
|
||||
)
|
||||
print("\n")
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -98,26 +102,20 @@ def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
    resources.extend(resources_in_region)
bar.title = f"-> {Fore.GREEN}Quick Inventory completed!{Style.RESET_ALL}"

resources.extend(global_resources)
total_resources_per_region["global"] = len(global_resources)
inventory_table = create_inventory_table(resources, total_resources_per_region)
inventory_table = create_inventory_table(resources)

print(
    f"\nQuick Inventory of AWS Account {Fore.YELLOW}{audit_info.audited_account}{Style.RESET_ALL}:"
)

print(
    tabulate(
        inventory_table, headers="keys", tablefmt="rounded_grid", stralign="left"
    )
)
print(tabulate(inventory_table, headers="keys", tablefmt="rounded_grid"))

print(f"\nTotal resources found: {Fore.GREEN}{len(resources)}{Style.RESET_ALL}")

create_output(resources, audit_info, output_directory)


def create_inventory_table(resources: list, resources_in_region: dict) -> dict:
    regions_with_resources = list(resources_in_region.keys())
def create_inventory_table(resources: list) -> dict:
    services = {}
    # { "S3":
    #      123,
@@ -126,79 +124,54 @@ def create_inventory_table(resources: list, resources_in_region: dict) -> dict:
    # }
    resources_type = {}
    # { "S3":
    #       "Buckets":
    #           eu-west-1: 10,
    #           eu-west-2: 3,
    #       "Buckets": 13,
    #   "IAM":
    #       "Roles":
    #           us-east-1: 143,
    #       "Users":
    #           us-west-2: 22,
    #       "Roles": 143,
    #       "Users": 22,
    # }

    inventory_table = {
        "Service": [],
        f"Total\n({Fore.GREEN}{str(len(resources))}{Style.RESET_ALL})": [],
        "Total per\nresource type": [],
    }

    for region, count in resources_in_region.items():
        inventory_table[f"{region}\n({Fore.GREEN}{str(count)}{Style.RESET_ALL})"] = []

    for resource in sorted(resources, key=lambda d: d["arn"]):
        service = resource["arn"].split(":")[2]
        region = resource["arn"].split(":")[3]
        if not region:
            region = "global"
    for resource in sorted(resources):
        service = resource.split(":")[2]
        if service not in services:
            services[service] = 0
        services[service] += 1

        resource_type = get_arn_resource_type(resource["arn"], service)

        if service == "s3":
            resource_type = "bucket"
        elif service == "sns":
            resource_type = "topic"
        elif service == "sqs":
            resource_type = "queue"
        elif service == "apigateway":
            split_parts = resource.split(":")[5].split("/")
            if "integration" in split_parts and "responses" in split_parts:
                resource_type = "restapis-resources-methods-integration-response"
            elif "documentation" in split_parts and "parts" in split_parts:
                resource_type = "restapis-documentation-parts"
            else:
                resource_type = resource.split(":")[5].split("/")[1]
        else:
            resource_type = resource.split(":")[5].split("/")[0]
        if service not in resources_type:
            resources_type[service] = {}
        if resource_type not in resources_type[service]:
            resources_type[service][resource_type] = {}
        if region not in resources_type[service][resource_type]:
            resources_type[service][resource_type][region] = 0
        resources_type[service][resource_type][region] += 1
            resources_type[service][resource_type] = 0
        resources_type[service][resource_type] += 1

    # Add results to inventory table
    inventory_table = {
        "Service": [],
        "Total": [],
        "Count per resource types": [],
    }
    for service in services:
        pending_regions = deepcopy(regions_with_resources)
        aux = {}
        # {
        #   "region": summary,
        # }
        summary = ""
        inventory_table["Service"].append(f"{service}")
        inventory_table[
            f"Total\n({Fore.GREEN}{str(len(resources))}{Style.RESET_ALL})"
        ].append(f"{Fore.GREEN}{services[service]}{Style.RESET_ALL}")
        for resource_type, regions in resources_type[service].items():
            summary += f"{resource_type} {Fore.GREEN}{str(sum(regions.values()))}{Style.RESET_ALL}\n"
            # Check if region does not have resource type
            for region in pending_regions:
                if region not in aux:
                    aux[region] = ""
                if region not in regions:
                    aux[region] += "-\n"
            for region, count in regions.items():
                aux[region] += f"{Fore.GREEN}{str(count)}{Style.RESET_ALL}\n"
        # Add Total per resource type
        inventory_table["Total per\nresource type"].append(summary)
        # Add Total per region
        for region, text in aux.items():
            inventory_table[
                f"{region}\n({Fore.GREEN}{str(resources_in_region[region])}{Style.RESET_ALL})"
            ].append(text)
            if region in pending_regions:
                pending_regions.remove(region)
        for region_without_resource in pending_regions:
            inventory_table[
                f"{region_without_resource}\n ({Fore.GREEN}{str(resources_in_region[region_without_resource])}{Style.RESET_ALL})"
            ].append("-")
        inventory_table["Total"].append(
            f"{Fore.GREEN}{services[service]}{Style.RESET_ALL}"
        )
        for resource_type in resources_type[service]:
            summary += f"{resource_type} {Fore.GREEN}{resources_type[service][resource_type]}{Style.RESET_ALL}\n"
        inventory_table["Count per resource types"].append(summary)

    return inventory_table
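
# A minimal sketch (hypothetical values) of how the dict returned above renders
# with tabulate, as quick_inventory does; each key becomes a column header:
#
#   from tabulate import tabulate
#   table = {"Service": ["iam", "s3"], "Total": [3, 2],
#            "Count per resource types": ["role 2\nuser 1\n", "bucket 2\n"]}
#   print(tabulate(table, headers="keys", tablefmt="rounded_grid"))
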
@@ -207,37 +180,30 @@ def create_output(resources: list, audit_info: AWS_Audit_Info, output_directory:
    json_output = []
    output_file = f"{output_directory}/prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"

    for item in sorted(resources, key=lambda d: d["arn"]):
    for item in sorted(resources):
        resource = {}
        resource["AWS_AccountID"] = audit_info.audited_account
        resource["AWS_Region"] = item["arn"].split(":")[3]
        resource["AWS_Partition"] = item["arn"].split(":")[1]
        resource["AWS_Service"] = item["arn"].split(":")[2]
        resource["AWS_ResourceType"] = item["arn"].split(":")[5].split("/")[0]
        resource["AWS_Region"] = item.split(":")[3]
        resource["AWS_Partition"] = item.split(":")[1]
        resource["AWS_Service"] = item.split(":")[2]
        resource["AWS_ResourceType"] = item.split(":")[5].split("/")[0]
        resource["AWS_ResourceID"] = ""
        if len(item["arn"].split("/")) > 1:
            resource["AWS_ResourceID"] = item["arn"].split("/")[-1]
        elif len(item["arn"].split(":")) > 6:
            resource["AWS_ResourceID"] = item["arn"].split(":")[-1]
        resource["AWS_ResourceARN"] = item["arn"]
        if len(item.split("/")) > 1:
            resource["AWS_ResourceID"] = item.split("/")[-1]
        elif len(item.split(":")) > 6:
            resource["AWS_ResourceID"] = item.split(":")[-1]
        resource["AWS_ResourceARN"] = item
        # Cover S3 case
        if resource["AWS_Service"] == "s3":
            resource["AWS_ResourceType"] = "bucket"
            resource["AWS_ResourceID"] = item["arn"].split(":")[-1]
            resource["AWS_ResourceID"] = item.split(":")[-1]
        # Cover WAFv2 case
        if resource["AWS_Service"] == "wafv2":
            resource["AWS_ResourceType"] = "/".join(
                item["arn"].split(":")[-1].split("/")[:-2]
            )
            resource["AWS_ResourceID"] = "/".join(
                item["arn"].split(":")[-1].split("/")[2:]
            )
            resource["AWS_ResourceType"] = "/".join(item.split(":")[-1].split("/")[:-2])
            resource["AWS_ResourceID"] = "/".join(item.split(":")[-1].split("/")[2:])
        # Cover Config case
        if resource["AWS_Service"] == "config":
            resource["AWS_ResourceID"] = "/".join(
                item["arn"].split(":")[-1].split("/")[1:]
            )
        resource["AWS_Tags"] = item["tags"]
            resource["AWS_ResourceID"] = "/".join(item.split(":")[-1].split("/")[1:])
        json_output.append(resource)
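
        # For reference, the split(":") indexing above follows the standard ARN
        # layout arn:partition:service:region:account-id:resource, so index 1 is
        # the partition, 2 the service, 3 the region and 5 the resource part.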

    # Serializing json
@@ -263,64 +229,3 @@ def create_output(resources: list, audit_info: AWS_Audit_Info, output_directory:
    print("\nMore details in files:")
    print(f" - CSV: {output_file+csv_file_suffix}")
    print(f" - JSON: {output_file+json_file_suffix}")


def get_regional_buckets(audit_info: AWS_Audit_Info, region: str) -> list:
    regional_buckets = []
    s3_client = audit_info.audit_session.client("s3", region_name=region)
    buckets = s3_client.list_buckets()
    for bucket in buckets["Buckets"]:
        bucket_region = s3_client.get_bucket_location(Bucket=bucket["Name"])[
            "LocationConstraint"
        ]
        if bucket_region == "EU":  # If EU, bucket_region is eu-west-1
            bucket_region = "eu-west-1"
        if not bucket_region:  # If None, bucket_region is us-east-1
            bucket_region = "us-east-1"
        if bucket_region == region:  # Only add the bucket if it is in the current region
            try:
                bucket_tags = s3_client.get_bucket_tagging(Bucket=bucket["Name"])[
                    "TagSet"
                ]
            except ClientError as error:
                bucket_tags = []
                if error.response["Error"]["Code"] != "NoSuchTagSet":
                    logger.error(
                        f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                    )
            bucket_arn = (
                f"arn:{audit_info.audited_partition}:s3:{region}::{bucket['Name']}"
            )
            regional_buckets.append({"arn": bucket_arn, "tags": bucket_tags})
    return regional_buckets
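
# A minimal usage sketch (hypothetical bucket/client) of the quirks handled
# above: GetBucketLocation returns None for buckets in us-east-1 and the legacy
# value "EU" for eu-west-1, so both need normalizing before the comparison:
#
#   location = s3_client.get_bucket_location(Bucket="my-bucket")["LocationConstraint"]
#   bucket_region = {"EU": "eu-west-1", None: "us-east-1"}.get(location, location)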


def get_iam_resources(session) -> list:
    iam_resources = []
    iam_client = session.client("iam")
    get_roles_paginator = iam_client.get_paginator("list_roles")
    for page in get_roles_paginator.paginate():
        for role in page["Roles"]:
            # Avoid aws-service-role roles
            if "aws-service-role" not in role["Arn"]:
                iam_resources.append({"arn": role["Arn"], "tags": role.get("Tags")})

    get_users_paginator = iam_client.get_paginator("list_users")
    for page in get_users_paginator.paginate():
        for user in page["Users"]:
            iam_resources.append({"arn": user["Arn"], "tags": user.get("Tags")})

    get_groups_paginator = iam_client.get_paginator("list_groups")
    for page in get_groups_paginator.paginate():
        for group in page["Groups"]:
            iam_resources.append({"arn": group["Arn"], "tags": []})

    get_policies_paginator = iam_client.get_paginator("list_policies")
    for page in get_policies_paginator.paginate(Scope="Local"):
        for policy in page["Policies"]:
            iam_resources.append({"arn": policy["Arn"], "tags": policy.get("Tags")})

    for saml_provider in iam_client.list_saml_providers()["SAMLProviderList"]:
        iam_resources.append({"arn": saml_provider["Arn"], "tags": []})

    return iam_resources
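
# A minimal usage sketch, assuming an already-authenticated boto3 session:
#
#   import boto3
#   session = boto3.session.Session(profile_name="default")  # hypothetical profile
#   for res in get_iam_resources(session):
#       print(res["arn"], res["tags"])
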
@@ -1,38 +0,0 @@
import sys

from prowler.lib.logger import logger
from prowler.providers.aws.aws_provider import generate_regional_clients
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info


def get_tagged_resources(input_resource_tags: list, current_audit_info: AWS_Audit_Info):
    """
    get_tagged_resources returns the list of resources to be scanned based on the given input tags
    """
    try:
        resource_tags = []
        tagged_resources = []
        for tag in input_resource_tags:
            key = tag.split("=")[0]
            value = tag.split("=")[1]
            resource_tags.append({"Key": key, "Values": [value]})
        # Get resources with resource_tags for all regions
        for regional_client in generate_regional_clients(
            "resourcegroupstaggingapi", current_audit_info
        ).values():
            try:
                get_resources_paginator = regional_client.get_paginator("get_resources")
                for page in get_resources_paginator.paginate(TagFilters=resource_tags):
                    for resource in page["ResourceTagMappingList"]:
                        tagged_resources.append(resource["ResourceARN"])
            except Exception as error:
                logger.error(
                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
        sys.exit(1)
    else:
        return tagged_resources
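
# A minimal usage sketch (hypothetical tags): each input tag is expected as
# "Key=Value" and becomes one resourcegroupstaggingapi TagFilter, so multiple
# tags narrow the result to resources matching all of them:
#
#   arns = get_tagged_resources(["env=prod", "team=security"], current_audit_info)
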
@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -17,7 +17,6 @@ class accessanalyzer_enabled(Check):
                )
                report.resource_id = analyzer.name
                report.resource_arn = analyzer.arn
                report.resource_tags = analyzer.tags

            elif analyzer.status == "NOT_AVAILABLE":
                report.status = "FAIL"
@@ -32,7 +31,6 @@ class accessanalyzer_enabled(Check):
                )
                report.resource_id = analyzer.name
                report.resource_arn = analyzer.arn
                report.resource_tags = analyzer.tags
            findings.append(report)

        return findings

@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -17,7 +17,6 @@ class accessanalyzer_enabled_without_findings(Check):
                )
                report.resource_id = analyzer.name
                report.resource_arn = analyzer.arn
                report.resource_tags = analyzer.tags
                if len(analyzer.findings) != 0:
                    active_finding_counter = 0
                    for finding in analyzer.findings:
@@ -29,7 +28,6 @@ class accessanalyzer_enabled_without_findings(Check):
                    report.status_extended = f"IAM Access Analyzer {analyzer.name} has {active_finding_counter} active findings"
                    report.resource_id = analyzer.name
                    report.resource_arn = analyzer.arn
                    report.resource_tags = analyzer.tags
            elif analyzer.status == "NOT_AVAILABLE":
                report.status = "FAIL"
                report.status_extended = (
@@ -43,7 +41,6 @@ class accessanalyzer_enabled_without_findings(Check):
                )
                report.resource_id = analyzer.name
                report.resource_arn = analyzer.arn
                report.resource_tags = analyzer.tags
            findings.append(report)

        return findings

@@ -1,7 +1,5 @@
import threading
from typing import Optional

from botocore.exceptions import ClientError
from pydantic import BaseModel

from prowler.lib.logger import logger
@@ -50,7 +48,7 @@ class AccessAnalyzer:
                        arn=analyzer["arn"],
                        name=analyzer["name"],
                        status=analyzer["status"],
                        tags=[analyzer.get("tags")],
                        tags=str(analyzer["tags"]),
                        type=analyzer["type"],
                        region=regional_client.region,
                    )
@@ -62,7 +60,7 @@ class AccessAnalyzer:
                        arn="",
                        name=self.audited_account,
                        status="NOT_AVAILABLE",
                        tags=[],
                        tags="",
                        type="",
                        region=regional_client.region,
                    )
@@ -80,21 +78,10 @@ class AccessAnalyzer:
                if analyzer.status == "ACTIVE":
                    regional_client = self.regional_clients[analyzer.region]
                    for finding in analyzer.findings:
                        try:
                            finding_information = regional_client.get_finding(
                                analyzerArn=analyzer.arn, id=finding.id
                            )
                            finding.status = finding_information["finding"]["status"]
                        except ClientError as error:
                            if (
                                error.response["Error"]["Code"]
                                == "ResourceNotFoundException"
                            ):
                                logger.warning(
                                    f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                                )
                                finding.status = ""
                            continue
                        finding_information = regional_client.get_finding(
                            analyzerArn=analyzer.arn, id=finding.id
                        )
                        finding.status = finding_information["finding"]["status"]

        except Exception as error:
            logger.error(
@@ -132,6 +119,6 @@ class Analyzer(BaseModel):
    name: str
    status: str
    findings: list[Finding] = []
    tags: Optional[list] = []
    tags: str
    type: str
    region: str

@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -1,20 +1,12 @@
from prowler.providers.aws.aws_provider import generate_regional_clients


################## Account


class Account:
    def __init__(self, audit_info):
        self.service = "account"
        self.session = audit_info.audit_session
        self.audited_account = audit_info.audited_account
        self.regional_clients = generate_regional_clients(self.service, audit_info)
        # If the region is not set in the audit profile,
        # we pick the first region from the regional clients list
        self.region = (
            audit_info.profile_region
            if audit_info.profile_region
            else list(self.regional_clients.keys())[0]
        )
        self.region = audit_info.profile_region

    def __get_session__(self):
        return self.session
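
    # Sketch of the region fallback removed above: with no profile_region set,
    # the first regional client key was used instead, i.e.
    #   region = profile_region or list(regional_clients.keys())[0]
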
@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -15,13 +15,11 @@ class acm_certificates_expiration_check(Check):
                report.status_extended = f"ACM Certificate for {certificate.name} expires in {certificate.expiration_days} days."
                report.resource_id = certificate.name
                report.resource_arn = certificate.arn
                report.resource_tags = certificate.tags
            else:
                report.status = "FAIL"
                report.status_extended = f"ACM Certificate for {certificate.name} is about to expire in {DAYS_TO_EXPIRE_THRESHOLD} days."
                report.resource_id = certificate.name
                report.resource_arn = certificate.arn
                report.resource_tags = certificate.tags

            findings.append(report)
        return findings

@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -15,19 +15,16 @@ class acm_certificates_transparency_logs_enabled(Check):
                )
                report.resource_id = certificate.name
                report.resource_arn = certificate.arn
                report.resource_tags = certificate.tags
            else:
                if not certificate.transparency_logging:
                    report.status = "FAIL"
                    report.status_extended = f"ACM Certificate for {certificate.name} has Certificate Transparency logging disabled."
                    report.resource_id = certificate.name
                    report.resource_arn = certificate.arn
                    report.resource_tags = certificate.tags
                else:
                    report.status = "PASS"
                    report.status_extended = f"ACM Certificate for {certificate.name} has Certificate Transparency logging enabled."
                    report.resource_id = certificate.name
                    report.resource_arn = certificate.arn
                    report.resource_tags = certificate.tags
            findings.append(report)
        return findings

@@ -1,9 +1,7 @@
import threading
from datetime import datetime
from typing import Optional

from pydantic import BaseModel
from dataclasses import dataclass

from prowler.config.config import timestamp_utc
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -20,7 +18,6 @@ class ACM:
        self.certificates = []
        self.__threading_call__(self.__list_certificates__)
        self.__describe_certificates__()
        self.__list_tags_for_certificate__()

    def __get_session__(self):
        return self.session
@@ -47,26 +44,12 @@ class ACM:
                        certificate["CertificateArn"], self.audit_resources
                    )
                ):
                    if "NotAfter" in certificate:
                        # We need to get the TZ info to be able to do the math
                        certificate_expiration_time = (
                            certificate["NotAfter"]
                            - datetime.now(
                                certificate["NotAfter"].tzinfo
                                if hasattr(certificate["NotAfter"], "tzinfo")
                                else None
                            )
                        ).days
                    else:
                        certificate_expiration_time = 0
                    self.certificates.append(
                        Certificate(
                            arn=certificate["CertificateArn"],
                            name=certificate["DomainName"],
                            type=certificate["Type"],
                            expiration_days=certificate_expiration_time,
                            transparency_logging=False,
                            region=regional_client.region,
                            certificate["CertificateArn"],
                            certificate["DomainName"],
                            False,
                            regional_client.region,
                        )
                    )
            except Exception as error:
@@ -82,6 +65,13 @@ class ACM:
                response = regional_client.describe_certificate(
                    CertificateArn=certificate.arn
                )["Certificate"]
                certificate.type = response["Type"]
                if "NotAfter" in response:
                    certificate.expiration_days = (
                        response["NotAfter"] - timestamp_utc
                    ).days
                else:
                    certificate.expiration_days = 0
                if (
                    response["Options"]["CertificateTransparencyLoggingPreference"]
                    == "ENABLED"
@@ -92,26 +82,24 @@ class ACM:
                f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    def __list_tags_for_certificate__(self):
        logger.info("ACM - List Tags...")
        try:
            for certificate in self.certificates:
                regional_client = self.regional_clients[certificate.region]
                response = regional_client.list_tags_for_certificate(
                    CertificateArn=certificate.arn
                )["Tags"]
                certificate.tags = response
        except Exception as error:
            logger.error(
                f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )


class Certificate(BaseModel):
@dataclass
class Certificate:
    arn: str
    name: str
    type: str
    tags: Optional[list] = []
    expiration_days: int
    transparency_logging: Optional[bool]
    transparency_logging: bool
    region: str

    def __init__(
        self,
        arn,
        name,
        transparency_logging,
        region,
    ):
        self.arn = arn
        self.name = name
        self.transparency_logging = transparency_logging
        self.region = region
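
# A minimal construction sketch (hypothetical values) for the explicit __init__
# above; type, expiration_days and tags are filled in later by
# __describe_certificates__ and __list_tags_for_certificate__:
#
#   cert = Certificate(
#       "arn:aws:acm:eu-west-1:123456789012:certificate/abcd",  # hypothetical ARN
#       "example.com",
#       False,
#       "eu-west-1",
#   )
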
@@ -26,6 +26,10 @@
    }
  },
  "Categories": [],
  "Tags": {
    "Tag1Key": "value",
    "Tag2Key": "value"
  },
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""

@@ -12,7 +12,6 @@ class apigateway_authorizers_enabled(Check):
                report.region = rest_api.region
                report.resource_id = rest_api.name
                report.resource_arn = rest_api.arn
                report.resource_tags = rest_api.tags
                if rest_api.authorizer:
                    report.status = "PASS"
                    report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} has authorizer configured."