Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-01-25 02:08:11 +00:00)

Compare commits: 32 commits
Commits compared (SHA1):
b0c4cb98e9
70e4ebccab
140f87c741
b0d756123e
6188c92916
34c6f96728
50fd047c0b
5bcc05b536
ce7d6c8dd5
d87a1e28b4
227306c572
45c2691f89
d0c81245b8
e494afb1aa
ecc3c1cf3b
228b16416a
17eb74842a
c01ff74c73
f88613b26d
3464f4241f
849b703828
4b935a40b6
5873a23ccb
eae2786825
6407386de5
3fe950723f
52bf6acd46
9590e7d7e0
7a08140a2d
d1491cfbd1
695b80549d
11c60a637f
.github/CODEOWNERS (vendored): 2 changed lines

@@ -1 +1 @@
* @prowler-cloud/prowler-team
* @prowler-cloud/prowler-oss
.github/workflows/build-lint-push-containers.yml (vendored): 19 changed lines

@@ -47,14 +47,33 @@ jobs:
container-build:
# needs: dockerfile-linter
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
steps:
- name: Checkout
uses: actions/checkout@v3
- name: setup python (release)
if: github.event_name == 'release'
uses: actions/setup-python@v2
with:
python-version: 3.9 #install the python needed
- name: Install dependencies (release)
if: github.event_name == 'release'
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Update Prowler version (release)
if: github.event_name == 'release'
run: |
poetry version ${{ github.event.release.tag_name }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Build
uses: docker/build-push-action@v2
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context
context: .
# Without pushing to registries
push: false
tags: ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }}
.github/workflows/pull-request.yml (vendored): 20 changed lines

@@ -24,9 +24,9 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pipenv
pipenv install --dev
pipenv run pip list
pip install poetry
poetry install
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"v([^"]+)".*/\1/' \
@@ -34,25 +34,25 @@ jobs:
&& chmod +x /tmp/hadolint
- name: Lint with flake8
run: |
pipenv run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
- name: Checking format with black
run: |
pipenv run black --check .
poetry run black --check .
- name: Lint with pylint
run: |
pipenv run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
- name: Bandit
run: |
pipenv run bandit -q -lll -x '*_test.py,./contrib/' -r .
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
run: |
pipenv run safety check
poetry run safety check
- name: Vulture
run: |
pipenv run vulture --exclude "contrib" --min-confidence 100 .
poetry run vulture --exclude "contrib" --min-confidence 100 .
- name: Hadolint
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
run: |
pipenv run pytest tests -n auto
poetry run pytest tests -n auto
.github/workflows/pypi-release.yml (vendored): 57 changed lines

@@ -5,11 +5,14 @@ on:
types: [published]

env:
GITHUB_BRANCH: ${{ github.event.release.tag_name }}
RELEASE_TAG: ${{ github.event.release.tag_name }}
GITHUB_BRANCH: master

jobs:
release-prowler-job:
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
name: Release Prowler to PyPI
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
@@ -22,20 +25,46 @@ jobs:
python-version: 3.9 #install the python needed
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build toml --upgrade
- name: Build package
run: python -m build
- name: Publish prowler-cloud package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_API_TOKEN }}
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Change version and Build package
run: |
poetry version ${{ env.RELEASE_TAG }}
git config user.name "github-actions"
git config user.email "<noreply@github.com>"
git add prowler/config/config.py pyproject.toml
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}"
git push -f origin ${{ env.RELEASE_TAG }}
poetry build
- name: Publish prowler package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
- name: Replicate PyPi Package
run: |
rm -rf ./dist && rm -rf ./build && rm -rf prowler_cloud.egg-info
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
pip install toml
python util/replicate_pypi_package.py
python -m build
- name: Publish prowler package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
poetry build
- name: Publish prowler-cloud package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
# Create pull request with new version
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
password: ${{ secrets.PYPI_API_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
branch: release-${{ env.RELEASE_TAG }}
labels: "status/waiting-for-revision, severity/low"
title: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}"
body: |
### Description

This PR updates Prowler Version to ${{ env.RELEASE_TAG }}.

### License

By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
@@ -52,7 +52,7 @@ jobs:
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low"
@@ -13,6 +13,13 @@ repos:
- id: pretty-format-json
args: ["--autofix", --no-sort-keys, --no-ensure-ascii]

## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.7.0
hooks:
- id: pretty-format-toml
args: [--autofix]

## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.9.0
.readthedocs.yaml (new file): 23 lines

@@ -0,0 +1,23 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

build:
os: "ubuntu-22.04"
tools:
python: "3.9"
jobs:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- pip install poetry
# Tell poetry to not use a virtual environment
- poetry config virtualenvs.create false
post_install:
- poetry install -E docs

mkdocs:
configuration: mkdocs.yml
@@ -16,6 +16,7 @@ USER prowler
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler

# Install dependencies
ENV HOME='/home/prowler'
@@ -26,7 +27,7 @@ RUN pip install --no-cache-dir --upgrade pip && \

# Remove Prowler directory and build files
USER 0
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/build /home/prowler/prowler_cloud.egg-info
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info

USER prowler
ENTRYPOINT ["prowler"]
Makefile: 4 changed lines

@@ -24,11 +24,11 @@ lint: ## Lint Code

##@ PyPI
pypi-clean: ## Delete the distribution files
rm -rf ./dist && rm -rf ./build && rm -rf prowler_cloud.egg-info
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info

pypi-build: ## Build package
$(MAKE) pypi-clean && \
python3 -m build
poetry build

pypi-upload: ## Upload package
python3 -m twine upload --repository pypi dist/*
Pipfile (deleted): 42 lines

@@ -1,42 +0,0 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
colorama = "0.4.4"
boto3 = "1.26.74"
arnparse = "0.0.2"
botocore = "1.29.75"
pydantic = "1.10.5"
schema = "0.7.5"
shodan = "1.28.0"
detect-secrets = "1.4.0"
alive-progress = "2.4.1"
tabulate = "0.9.0"
azure-identity = "1.12.0"
azure-storage-blob = "12.14.1"
msgraph-core = "0.2.2"
azure-mgmt-subscription = "3.1.1"
azure-mgmt-authorization = "3.0.0"
azure-mgmt-security = "3.0.0"
azure-mgmt-storage = "21.0.0"

[dev-packages]
black = "22.10.0"
pylint = "2.16.2"
flake8 = "5.0.4"
bandit = "1.7.4"
safety = "2.3.1"
vulture = "2.7"
moto = "4.1.3"
docker = "6.0.0"
openapi-spec-validator = "0.5.5"
pytest = "7.2.1"
pytest-xdist = "3.2.0"
coverage = "7.1.0"
sure = "2.0.1"
freezegun = "1.2.1"

[requires]
python_version = "3.9"
Pipfile.lock (generated): 1703 changed lines. File diff suppressed because it is too large.
README.md: 11 changed lines

@@ -13,8 +13,9 @@
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
<a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg?label=prowler-cloud%20downloads"></a>
<a href="https://formulae.brew.sh/formula/prowler#default"><img alt="Brew Prowler Downloads" src="https://img.shields.io/homebrew/installs/dm/prowler?label=brew%20downloads"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
@@ -64,13 +65,13 @@ The container images are available here:

## From Github

Python >= 3.9 is required with pip and pipenv:
Python >= 3.9 is required with pip and poetry:

```
git clone https://github.com/prowler-cloud/prowler
cd prowler
pipenv shell
pipenv install
poetry shell
poetry install
python prowler.py -v
```
@@ -5,7 +5,7 @@

# Prowler Documentation

**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄

For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cloud/prowler/tree/2.12.0) to the branch and its README.md.

@@ -118,7 +118,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
./configure --enable-optimizations
sudo make altinstall
python3.9 --version
cd
cd
```
_Commands_:
poetry.lock (generated, new file): 2621 lines. File diff suppressed because it is too large.
@@ -11,6 +11,7 @@ from prowler.lib.check.check import (
exclude_services_to_run,
execute_checks,
get_checks_from_input_arn,
get_regions_from_audit_resources,
list_categories,
list_services,
print_categories,
@@ -136,6 +137,9 @@ def prowler():

# Once the audit_info is set and we have the eventual checks from arn, it is time to exclude the others
if audit_info.audit_resources:
audit_info.audited_regions = get_regions_from_audit_resources(
audit_info.audit_resources
)
checks_to_execute = get_checks_from_input_arn(
audit_info.audit_resources, provider
)
@@ -9,7 +9,7 @@ from prowler.lib.logger import logger

timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.2.1"
prowler_version = "3.2.3"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
@@ -18,28 +18,17 @@ banner_color = "\033[1;92m"

# Compliance
compliance_specification_dir = "./compliance"
available_compliance_frameworks = [
"ens_rd2022_aws",
"cis_1.4_aws",
"cis_1.5_aws",
"aws_audit_manager_control_tower_guardrails_aws",
"aws_foundational_security_best_practices_aws",
"cisa_aws",
"fedramp_low_revision_4_aws",
"fedramp_moderate_revision_4_aws",
"ffiec_aws",
"gdpr_aws",
"gxp_eu_annex_11_aws",
"gxp_21_cfr_part_11_aws",
"hipaa_aws",
"nist_800_53_revision_4_aws",
"nist_800_53_revision_5_aws",
"nist_800_171_revision_2_aws",
"nist_csf_1.1_aws",
"pci_3.2.1_aws",
"rbi_cyber_security_framework_aws",
"soc2_aws",
]
compliance_aws_dir = "./prowler/compliance/aws"
available_compliance_frameworks = []
with os.scandir(compliance_aws_dir) as files:
files = [
file.name
for file in files
if file.is_file()
and file.name.endswith(".json")
and available_compliance_frameworks.append(file.name.removesuffix(".json"))
]

# AWS services-regions matrix json
aws_services_json_file = "aws_regions_by_service.json"
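The committed block above replaces the hard-coded framework list with a scan of the compliance directory at import time. A minimal standalone sketch of the same idea, assuming one JSON file per framework (the directory path is the one named in the diff; everything else is illustrative):

```python
import os

compliance_aws_dir = "./prowler/compliance/aws"

# Collect framework names such as "cis_1.5_aws" from files like "cis_1.5_aws.json".
available_compliance_frameworks = [
    entry.name.removesuffix(".json")  # str.removesuffix needs Python >= 3.9
    for entry in os.scandir(compliance_aws_dir)
    if entry.is_file() and entry.name.endswith(".json")
]
```

The committed version appends inside the comprehension's filter condition (relying on `list.append` returning `None`); the sketch returns the names directly, which is the more conventional comprehension shape.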
@@ -511,19 +511,61 @@ def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
checks_from_arn = set()
# Handle if there are audit resources so only their services are executed
if audit_resources:
service_list = []
services_without_subservices = ["guardduty", "kms", "s3", "elb"]
service_list = set()
sub_service_list = set()
for resource in audit_resources:
service = resource.split(":")[2]
# Parse services when they are different in the ARNs
if service == "lambda":
service = "awslambda"
if service == "elasticloadbalancing":
service = "elb"
elif service == "logs":
service = "cloudwatch"
service_list.append(service)
sub_service = resource.split(":")[5].split("/")[0].replace("-", "_")

checks_from_arn = recover_checks_from_service(service_list, provider)
if (
service != "wafv2" and service != "waf"
): # WAF Services does not have checks
# Parse services when they are different in the ARNs
if service == "lambda":
service = "awslambda"
if service == "elasticloadbalancing":
service = "elb"
elif service == "logs":
service = "cloudwatch"
service_list.add(service)

# Get subservices to execute only applicable checks
if service not in services_without_subservices:
# Parse some specific subservices
if service == "ec2":
if sub_service == "security_group":
sub_service = "securitygroup"
if sub_service == "network_acl":
sub_service = "networkacl"
if sub_service == "image":
sub_service = "ami"
if service == "rds":
if sub_service == "cluster_snapshot":
sub_service = "snapshot"
sub_service_list.add(sub_service)
else:
sub_service_list.add(service)

checks = recover_checks_from_service(service_list, provider)

# Filter only checks with audited subservices
for check in checks:
if any(sub_service in check for sub_service in sub_service_list):
if not (sub_service == "policy" and "password_policy" in check):
checks_from_arn.add(check)

# Return final checks list
return checks_from_arn
return sorted(checks_from_arn)


def get_regions_from_audit_resources(audit_resources: list) -> list:
"""get_regions_from_audit_resources gets the regions from the audit resources arns"""
audited_regions = []
for resource in audit_resources:
region = resource.split(":")[3]
if region and region not in audited_regions: # Check if arn has a region
audited_regions.append(region)
if audited_regions:
return audited_regions
return None
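For reference, a quick sketch of how the ARN fields used above map to service, region and sub-resource when the string is split on ":" (the sample ARN and account number are made up):

```python
# arn:partition:service:region:account-id:resource
arn = "arn:aws:ec2:eu-west-1:123456789012:security-group/sg-0abc1234"

parts = arn.split(":")
service = parts[2]        # "ec2"
region = parts[3]         # "eu-west-1" (empty string for global services such as IAM)
sub_service = parts[5].split("/")[0].replace("-", "_")  # "security_group"

print(service, region, sub_service)
```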
@@ -1630,6 +1630,13 @@
]
}
},
"cloudtrail-data": {
"regions": {
"aws": [],
"aws-cn": [],
"aws-us-gov": []
}
},
"cloudwatch": {
"regions": {
"aws": [
@@ -2577,17 +2584,17 @@
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-3",
"ap-southeast-1",
"ca-central-1",
"eu-south-2",
"eu-west-1",
"eu-west-3",
"us-east-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-4",
"eu-west-2",
"me-south-1",
"eu-central-2",
"sa-east-1",
"us-east-2",
"us-west-2",
@@ -2596,7 +2603,9 @@
"eu-central-1",
"eu-north-1",
"eu-south-1",
"eu-west-2",
"me-central-1",
"me-south-1",
"us-west-1"
],
"aws-cn": [
@@ -3325,20 +3334,23 @@
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-west-2",
"us-west-1"
"us-west-1",
"us-west-2"
],
"aws-cn": [
"cn-northwest-1",
"cn-north-1"
],
"aws-us-gov": []
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"emr-serverless": {
@@ -5053,8 +5065,8 @@
"regions": {
"aws": [
"af-south-1",
"ap-northeast-2",
"ap-southeast-1",
"ap-southeast-3",
"ca-central-1",
"eu-central-1",
"eu-north-1",
@@ -5063,14 +5075,15 @@
"us-west-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"me-south-1",
"us-west-2",
"ap-south-1",
"ap-southeast-2",
"eu-west-2",
"sa-east-1",
"us-east-2"
],
@@ -8059,6 +8072,22 @@
"aws-us-gov": []
}
},
"simpledb": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-southeast-1",
"ap-southeast-2",
"eu-west-1",
"sa-east-1",
"us-east-1",
"us-west-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"simspaceweaver": {
"regions": {
"aws": [
@@ -8902,12 +8931,13 @@
"ap-south-1",
"ap-southeast-1",
"ca-central-1",
"eu-north-1",
"eu-central-2",
"eu-west-1",
"us-east-2",
"us-west-1",
"ap-southeast-2",
"ap-southeast-3",
"eu-north-1",
"eu-south-1",
"me-south-1",
"us-west-2"
@@ -1,7 +1,9 @@
import threading
from dataclasses import dataclass
from datetime import datetime
from typing import Optional

from pydantic import BaseModel

from prowler.config.config import timestamp_utc
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -44,12 +46,26 @@ class ACM:
certificate["CertificateArn"], self.audit_resources
)
):
if "NotAfter" in certificate:
# We need to get the TZ info to be able to do the math
certificate_expiration_time = (
certificate["NotAfter"]
- datetime.now(
certificate["NotAfter"].tzinfo
if hasattr(certificate["NotAfter"], "tzinfo")
else None
)
).days
else:
certificate_expiration_time = 0
self.certificates.append(
Certificate(
certificate["CertificateArn"],
certificate["DomainName"],
False,
regional_client.region,
arn=certificate["CertificateArn"],
name=certificate["DomainName"],
type=certificate["Type"],
expiration_days=certificate_expiration_time,
transparency_logging=False,
region=regional_client.region,
)
)
except Exception as error:
@@ -65,13 +81,6 @@ class ACM:
response = regional_client.describe_certificate(
CertificateArn=certificate.arn
)["Certificate"]
certificate.type = response["Type"]
if "NotAfter" in response:
certificate.expiration_days = (
response["NotAfter"] - timestamp_utc
).days
else:
certificate.expiration_days = 0
if (
response["Options"]["CertificateTransparencyLoggingPreference"]
== "ENABLED"
@@ -83,23 +92,10 @@ class ACM:
)


@dataclass
class Certificate:
class Certificate(BaseModel):
arn: str
name: str
type: str
expiration_days: int
transparency_logging: bool
transparency_logging: Optional[bool]
region: str

def __init__(
self,
arn,
name,
transparency_logging,
region,
):
self.arn = arn
self.name = name
self.transparency_logging = transparency_logging
self.region = region
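The expiration math above subtracts datetime.now() built with the certificate's own tzinfo from the NotAfter timestamp, so aware and naive datetimes are never mixed. A minimal standalone sketch of that pattern (the sample timestamp is invented):

```python
from datetime import datetime, timedelta, timezone

# Stand-in for certificate["NotAfter"]; ACM returns an aware datetime,
# but the guard below also tolerates a naive one.
not_after = datetime.now(timezone.utc) + timedelta(days=90)

expiration_days = (
    not_after
    - datetime.now(not_after.tzinfo if hasattr(not_after, "tzinfo") else None)
).days

print(expiration_days)  # 89: the difference is a hair under 90 full days
```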
@@ -13,7 +13,7 @@
"RelatedUrl": "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-protect-stacks.html",
"Remediation": {
"Code": {
"CLI": "aws cloudformation update-termination-protection --region us-east-1 --stack-name <STACK_NAME> --enable-termination-protection",
"CLI": "aws cloudformation update-termination-protection --region <REGION_NAME> --stack-name <STACK_NAME> --enable-termination-protection",
"NativeIaC": "",
"Other": "",
"Terraform": ""
@@ -12,7 +12,10 @@ class cloudfront_distributions_field_level_encryption_enabled(Check):
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id
if distribution.default_cache_config.field_level_encryption_id:
if (
distribution.default_cache_config
and distribution.default_cache_config.field_level_encryption_id
):
report.status = "PASS"
report.status_extended = f"CloudFront Distribution {distribution.id} has Field Level Encryption enabled"
else:
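Several CloudFront checks in this diff apply the same guard: confirm the optional default_cache_config object exists before reading one of its attributes. A small sketch of the pattern with simplified, made-up model names:

```python
from typing import Optional

from pydantic import BaseModel


class CacheConfig(BaseModel):
    field_level_encryption_id: str = ""


class Distribution(BaseModel):
    id: str
    default_cache_config: Optional[CacheConfig] = None


def has_field_level_encryption(distribution: Distribution) -> bool:
    # Short-circuit on the optional object first so a missing cache config
    # never raises AttributeError.
    return bool(
        distribution.default_cache_config
        and distribution.default_cache_config.field_level_encryption_id
    )


print(has_field_level_encryption(Distribution(id="EDFDVBD6EXAMPLE")))  # False
```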
@@ -16,13 +16,15 @@ class cloudfront_distributions_https_enabled(Check):
report.resource_arn = distribution.arn
report.resource_id = distribution.id
if (
distribution.default_cache_config.viewer_protocol_policy
distribution.default_cache_config
and distribution.default_cache_config.viewer_protocol_policy
== ViewerProtocolPolicy.allow_all
):
report.status = "FAIL"
report.status_extended = f"CloudFront Distribution {distribution.id} viewers can use HTTP or HTTPS"
elif (
distribution.default_cache_config.viewer_protocol_policy
distribution.default_cache_config
and distribution.default_cache_config.viewer_protocol_policy
== ViewerProtocolPolicy.redirect_to_https
):
report.status = "PASS"
@@ -30,7 +32,8 @@ class cloudfront_distributions_https_enabled(Check):
f"CloudFront Distribution {distribution.id} has redirect to HTTPS"
)
elif (
distribution.default_cache_config.viewer_protocol_policy
distribution.default_cache_config
and distribution.default_cache_config.viewer_protocol_policy
== ViewerProtocolPolicy.https_only
):
report.status = "PASS"
@@ -12,9 +12,9 @@ class cloudfront_distributions_logging_enabled(Check):
report.region = distribution.region
report.resource_arn = distribution.arn
report.resource_id = distribution.id
if (
distribution.logging_enabled
or distribution.default_cache_config.realtime_log_config_arn
if distribution.logging_enabled or (
distribution.default_cache_config
and distribution.default_cache_config.realtime_log_config_arn
):
report.status = "PASS"
report.status_extended = (
@@ -10,6 +10,7 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
@@ -23,19 +24,23 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
for bucket in s3_client.buckets:
# Here we need to ensure that acl_grantee is filled since if we don't have permissions to query the api for a concrete region
# (for example due to a SCP) we are going to try access an attribute from a None type
if trail_bucket == bucket.name and bucket.acl_grantees:
for grant in bucket.acl_grantees:
if (
grant.URI
== "http://acs.amazonaws.com/groups/global/AllUsers"
):
report.status = "FAIL"
if trail.is_multiregion:
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible"
else:
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible"
break

if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.acl_grantees:
for grant in bucket.acl_grantees:
if (
grant.URI
== "http://acs.amazonaws.com/groups/global/AllUsers"
):
report.status = "FAIL"
if trail.is_multiregion:
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible"
else:
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible"
break
# check if trail bucket is a cross account bucket
if not trail_bucket_is_in_account:
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually"
findings.append(report)

return findings
@@ -21,8 +21,8 @@ class directoryservice_ldap_certificate_expiration(Check):
remaining_days_to_expire = (
certificate.expiry_date_time
- datetime.now(
certificate.expiry_date_time.tz_info
if hasattr(certificate.expiry_date_time, "tz_info")
certificate.expiry_date_time.tzinfo
if hasattr(certificate.expiry_date_time, "tzinfo")
else None
)
).days
@@ -1,6 +1,7 @@
import threading
from typing import Optional

from botocore.client import ClientError
from pydantic import BaseModel

from prowler.lib.logger import logger
@@ -137,7 +138,11 @@ class ELBv2:
conditions=rule["Conditions"],
)
)

except ClientError as error:
if error.response["Error"]["Code"] == "ListenerNotFound":
logger.warning(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -79,7 +79,7 @@ class EMR:
# Master Node Security Groups
master_node_security_group = cluster_info["Cluster"][
"Ec2InstanceAttributes"
]["EmrManagedMasterSecurityGroup"]
].get("EmrManagedMasterSecurityGroup")
master_node_additional_security_groups = None
if (
"AdditionalMasterSecurityGroups"
@@ -97,6 +97,7 @@ class EMR:
slave_node_security_group = cluster_info["Cluster"][
"Ec2InstanceAttributes"
]["EmrManagedSlaveSecurityGroup"]
slave_node_additional_security_groups = []
if (
"AdditionalSlaveSecurityGroups"
in cluster_info["Cluster"]["Ec2InstanceAttributes"]
@@ -170,7 +171,7 @@ class ClusterStatus(Enum):


class Node(BaseModel):
security_group_id: str = ""
security_group_id: Optional[str] = ""
additional_security_groups_id: Optional[list[str]] = []
@@ -21,6 +21,7 @@ class iam_no_custom_policy_permissive_role_assumption(Check):
if (
statement["Effect"] == "Allow"
and "Action" in statement
and "Resource" in statement
and "*" in statement["Resource"]
):
if type(statement["Action"]) == list:
@@ -76,7 +76,12 @@ class KMS:
logger.info("KMS - Get Key Rotation Status...")
try:
for key in self.keys:
if "EXTERNAL" not in key.origin and "AWS" not in key.manager:
if (
key.origin
and key.manager
and "EXTERNAL" not in key.origin
and "AWS" not in key.manager
):
regional_client = self.regional_clients[key.region]
key.rotation_enabled = regional_client.get_key_rotation_status(
KeyId=key.id
@@ -90,7 +95,9 @@ class KMS:
logger.info("KMS - Get Key Policy...")
try:
for key in self.keys:
if key.manager == "CUSTOMER": # only customer KMS have policies
if (
key.manager and key.manager == "CUSTOMER"
): # only customer KMS have policies
regional_client = self.regional_clients[key.region]
key.policy = json.loads(
regional_client.get_key_policy(
@@ -249,7 +249,7 @@ class VpcEndpoint(BaseModel):
id: str
vpc_id: str
state: str
policy_document: dict
policy_document: Optional[dict]
owner_id: str
region: str
pyproject.toml: 120 changed lines

@@ -1,66 +1,86 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]

[project]
name = "prowler-cloud"
# https://peps.python.org/pep-0440/
version = "3.2.1"
authors = [{ name = "Toni de la Fuente", email = "toni@blyx.com" }]
maintainers = [
{ name = "Sergio Garcia", email = "sergio@verica.io" },
{ name = "Nacho Rivera", email = "nacho@verica.io" },
{ name = "Pepe Fagoaga", email = "pepe@verica.io" },
]
description = "Prowler is an Open Source security tool to perform Cloud Security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains more than 240 controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks."
readme = "README.md"
requires-python = ">=3.9"
license = { text = "Apache-2.0" }
# https://peps.python.org/pep-0621/
[tool.poetry]
authors = ["Toni de la Fuente <toni@blyx.com>"]
classifiers = [
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"License :: OSI Approved :: Apache Software License",
"License :: OSI Approved :: Apache Software License"
]

# https://peps.python.org/pep-0631/
dependencies = [
"colorama ~=0.4.4",
"boto3 ~=1.26.17",
"arnparse ~=0.0.2",
"botocore ~=1.29.18",
"pydantic ~=1.9.1",
"schema ~=0.7.5",
"shodan ~=1.28.0",
"detect-secrets ~=1.4.0",
"alive-progress ~=2.4.1",
"tabulate ~=0.9.0",
"azure-identity ~=1.12.0",
"azure-storage-blob ~=12.14.1",
"msgraph-core ~=0.2.2",
"azure-mgmt-subscription ~=3.1.1",
"azure-mgmt-authorization ~=3.0.0",
"azure-mgmt-security ~=3.0.0",
"azure-mgmt-storage ~=21.0.0",
description = "Prowler is an Open Source security tool to perform Cloud Security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains more than 240 controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks."
license = "Apache-2.0"
maintainers = [
"Sergio Garcia <sergio@verica.io>",
"Nacho Rivera <nacho@verica.io>",
"Pepe Fagoaga <pepe@verica.io>"
]
name = "prowler"
packages = [
{include = "prowler"}
]
readme = "README.md"
version = "3.2.3"

[project.urls]
"Homepage" = "https://github.com/prowler-cloud/prowler"
"Documentation" = "https://docs.prowler.cloud"
"Issue tracker" = "https://github.com/prowler-cloud/prowler/issues"
"Changelog" = "https://github.com/prowler-cloud/prowler/releases"
[tool.poetry.dependencies]
alive-progress = "3.0.1"
arnparse = "0.0.2"
azure-identity = "1.12.0"
azure-mgmt-authorization = "3.0.0"
azure-mgmt-security = "3.0.0"
azure-mgmt-storage = "21.0.0"
azure-mgmt-subscription = "3.1.1"
azure-storage-blob = "12.15.0"
boto3 = "1.26.79"
botocore = "1.29.79"
colorama = "0.4.6"
detect-secrets = "1.4.0"
mkdocs = {version = "1.4.2", optional = true}
mkdocs-material = {version = "9.0.14", optional = true}
msgraph-core = "0.2.2"
pydantic = "1.10.5"
python = "^3.9"
schema = "0.7.5"
shodan = "1.28.0"
tabulate = "0.9.0"

# https://setuptools.pypa.io/en/latest/userguide/package_discovery.html
[tool.setuptools]
include-package-data = true
package-dir = { "prowler" = "prowler" }
[tool.poetry.extras]
docs = ["mkdocs", "mkdocs-material"]

[tool.setuptools.package-data]
# We need the above pattern to include our data files
"*" = ["*.yaml", "*.json"]
[tool.poetry.group.dev.dependencies]
bandit = "1.7.4"
black = "22.10.0"
coverage = "7.1.0"
docker = "6.0.0"
flake8 = "5.0.4"
freezegun = "1.2.1"
moto = "4.1.2"
openapi-spec-validator = "0.5.5"
pylint = "2.16.2"
pytest = "7.2.1"
pytest-xdist = "3.2.0"
safety = "2.3.5"
sure = "2.0.1"
vulture = "2.7"

[project.scripts]
[tool.poetry.scripts]
prowler = "prowler.__main__:prowler"

[tool.poetry.urls]
"Changelog" = "https://github.com/prowler-cloud/prowler/releases"
"Documentation" = "https://docs.prowler.cloud"
"Homepage" = "https://github.com/prowler-cloud/prowler"
"Issue tracker" = "https://github.com/prowler-cloud/prowler/issues"

[tool.poetry-version-plugin]
source = "init"

[tool.poetry_bumpversion.file."prowler/config/config.py"]
replace = 'prowler_version = "{new_version}"'
search = 'prowler_version = "{current_version}"'

[tool.pytest.ini_options]
pythonpath = [
"."
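The [tool.poetry_bumpversion.file."prowler/config/config.py"] table tells the poetry-bumpversion plugin to rewrite the version string inside config.py whenever `poetry version <new-version>` runs. A rough Python illustration of the search/replace it is configured to perform (not the plugin's actual implementation):

```python
from pathlib import Path


def bump_config_version(current_version: str, new_version: str,
                        path: str = "prowler/config/config.py") -> None:
    """Replace the configured search pattern with the replace pattern."""
    config = Path(path)
    text = config.read_text()
    config.write_text(
        text.replace(
            f'prowler_version = "{current_version}"',
            f'prowler_version = "{new_version}"',
        )
    )


# Hypothetical usage matching this release:
# bump_config_version("3.2.1", "3.2.3")
```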
@@ -1,3 +0,0 @@
# Documentation static site generator & deployment tool
mkdocs>=1.3.0
mkdocs-material>=8.2.1
@@ -9,6 +9,7 @@ from prowler.lib.check.check import (
exclude_checks_to_run,
exclude_services_to_run,
get_checks_from_input_arn,
get_regions_from_audit_resources,
list_modules,
list_services,
parse_checks_from_file,
@@ -288,14 +289,27 @@ class Test_Check:
def test_get_checks_from_input_arn(self):
audit_resources = ["arn:aws:lambda:us-east-1:123456789:function:test-lambda"]
provider = "aws"
expected_checks = {
"awslambda_function_url_cors_policy",
expected_checks = [
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
"awslambda_function_no_secrets_in_code",
}
"awslambda_function_url_cors_policy",
]
recovered_checks = get_checks_from_input_arn(audit_resources, provider)
assert recovered_checks == expected_checks

def test_get_regions_from_audit_resources(self):
audit_resources = [
"arn:aws:lambda:us-east-1:123456789:function:test-lambda",
"arn:aws:iam::106908755756:policy/test",
"arn:aws:ec2:eu-west-1:106908755756:security-group/sg-test",
]
expected_regions = [
"us-east-1",
"eu-west-1",
]
recovered_regions = get_regions_from_audit_resources(audit_resources)
assert recovered_regions == expected_regions

# def test_parse_checks_from_compliance_framework_two(self):
# test_case = {
# "input": {"compliance_frameworks": ["cis_v1.4_aws", "ens_v3_aws"]},
@@ -1,59 +1,92 @@
import uuid
from unittest import mock

from boto3 import client
from moto import mock_acm
from prowler.providers.aws.services.acm.acm_service import Certificate

AWS_REGION = "us-east-1"
AWS_ACCOUNT_NUMBER = 123456789012
DAYS_TO_EXPIRE_THRESHOLD = 7


class Test_acm_certificates_expiration_check:
@mock_acm
def test_acm_certificate_expirated(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM

current_audit_info.audited_partition = "aws"
def test_no_acm_certificates(self):
acm_client = mock.MagicMock
acm_client.certificates = []

with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check.acm_client",
new=ACM(current_audit_info),
) as service_client:
"prowler.providers.aws.services.acm.acm_service.ACM",
new=acm_client,
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
acm_certificates_transparency_logs_enabled,
)

check = acm_certificates_transparency_logs_enabled()
result = check.execute()

assert len(result) == 0

def test_acm_certificate_expirated(self):
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{str(uuid.uuid4())}"
certificate_name = "test-certificate.com"
certificate_type = "AMAZON_ISSUED"

acm_client = mock.MagicMock
acm_client.certificates = [
Certificate(
arn=certificate_arn,
name=certificate_name,
type=certificate_type,
expiration_days=5,
transparency_logging=True,
region=AWS_REGION,
)
]

with mock.patch(
"prowler.providers.aws.services.acm.acm_service.ACM",
new=acm_client,
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check import (
acm_certificates_expiration_check,
)

service_client.certificates[0].expiration_days = 5
check = acm_certificates_expiration_check()
result = check.execute()

assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]
assert (
result[0].status_extended
== f"ACM Certificate for {certificate_name} is about to expire in {DAYS_TO_EXPIRE_THRESHOLD} days."
)
assert result[0].resource_id == certificate_name
assert result[0].resource_arn == certificate_arn
assert result[0].region == AWS_REGION

@mock_acm
def test_acm_certificate_not_expirated(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{str(uuid.uuid4())}"
certificate_name = "test-certificate.com"
certificate_type = "AMAZON_ISSUED"
expiration_days = 365

current_audit_info.audited_partition = "aws"
acm_client = mock.MagicMock
acm_client.certificates = [
Certificate(
arn=certificate_arn,
name=certificate_name,
type=certificate_type,
expiration_days=expiration_days,
transparency_logging=True,
region=AWS_REGION,
)
]

with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check.acm_client",
new=ACM(current_audit_info),
"prowler.providers.aws.services.acm.acm_service.ACM",
new=acm_client,
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_expiration_check.acm_certificates_expiration_check import (
@@ -65,5 +98,10 @@ class Test_acm_certificates_expiration_check:

assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]
assert (
result[0].status_extended
== f"ACM Certificate for {certificate_name} expires in {expiration_days} days."
)
assert result[0].resource_id == certificate_name
assert result[0].resource_arn == certificate_arn
assert result[0].region == AWS_REGION
@@ -1,29 +1,51 @@
import uuid
from unittest import mock

from boto3 import client
from moto import mock_acm
from prowler.providers.aws.services.acm.acm_service import Certificate

AWS_REGION = "us-east-1"
AWS_ACCOUNT_NUMBER = 123456789012


class Test_acm_certificates_transparency_logs_enabled:
@mock_acm
def test_acm_certificate_with_logging(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
Options={"CertificateTransparencyLoggingPreference": "ENABLED"},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM

current_audit_info.audited_partition = "aws"
def test_no_acm_certificates(self):
acm_client = mock.MagicMock
acm_client.certificates = []

with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled.acm_client",
new=ACM(current_audit_info),
"prowler.providers.aws.services.acm.acm_service.ACM",
new=acm_client,
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
acm_certificates_transparency_logs_enabled,
)

check = acm_certificates_transparency_logs_enabled()
result = check.execute()

assert len(result) == 0

def test_acm_certificate_with_logging(self):
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{str(uuid.uuid4())}"
certificate_name = "test-certificate.com"
certificate_type = "AMAZON_ISSUED"

acm_client = mock.MagicMock
acm_client.certificates = [
Certificate(
arn=certificate_arn,
name=certificate_name,
type=certificate_type,
expiration_days=365,
transparency_logging=True,
region=AWS_REGION,
)
]

with mock.patch(
"prowler.providers.aws.services.acm.acm_service.ACM",
new=acm_client,
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
@@ -37,35 +59,38 @@ class Test_acm_certificates_transparency_logs_enabled:
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "ACM Certificate for test.com has Certificate Transparency logging enabled."
== f"ACM Certificate for {certificate_name} has Certificate Transparency logging enabled."
)
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]
assert result[0].resource_id == certificate_name
assert result[0].resource_arn == certificate_arn
assert result[0].region == AWS_REGION

@mock_acm
def test_acm_certificate_without_logging(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
Options={"CertificateTransparencyLoggingPreference": "ENABLED"},
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{str(uuid.uuid4())}"
certificate_name = "test-certificate.com"
certificate_type = "AMAZON_ISSUED"

current_audit_info.audited_partition = "aws"
acm_client = mock.MagicMock
acm_client.certificates = [
Certificate(
arn=certificate_arn,
name=certificate_name,
type=certificate_type,
expiration_days=365,
transparency_logging=False,
region=AWS_REGION,
)
]

with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled.acm_client",
new=ACM(current_audit_info),
) as service_client:
"prowler.providers.aws.services.acm.acm_service.ACM",
new=acm_client,
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
acm_certificates_transparency_logs_enabled,
)

service_client.certificates[0].transparency_logging = False
check = acm_certificates_transparency_logs_enabled()
result = check.execute()

@@ -73,41 +98,8 @@ class Test_acm_certificates_transparency_logs_enabled:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "ACM Certificate for test.com has Certificate Transparency logging disabled."
== f"ACM Certificate for {certificate_name} has Certificate Transparency logging disabled."
)
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]

@mock_acm
def test_acm_default_certificate(self):
# Generate ACM Client
acm_client = client("acm", region_name=AWS_REGION)
# Request ACM certificate
certificate = acm_client.request_certificate(
DomainName="test.com",
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.acm.acm_service import ACM

current_audit_info.audited_partition = "aws"

with mock.patch(
"prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled.acm_client",
new=ACM(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.acm.acm_certificates_transparency_logs_enabled.acm_certificates_transparency_logs_enabled import (
acm_certificates_transparency_logs_enabled,
)

check = acm_certificates_transparency_logs_enabled()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "ACM Certificate for test.com has Certificate Transparency logging enabled."
)
assert result[0].resource_id == "test.com"
assert result[0].resource_arn == certificate["CertificateArn"]
assert result[0].resource_id == certificate_name
assert result[0].resource_arn == certificate_arn
assert result[0].region == AWS_REGION
@@ -1,13 +1,91 @@
|
||||
from boto3 import client, session
|
||||
from moto import mock_acm
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
import botocore
|
||||
from boto3 import session
|
||||
from freezegun import freeze_time
|
||||
from mock import patch
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.acm.acm_service import ACM
|
||||
|
||||
# from moto import mock_acm
|
||||
|
||||
|
||||
AWS_ACCOUNT_NUMBER = 123456789012
|
||||
AWS_REGION = "us-east-1"
|
||||
|
||||
# Mocking Access Analyzer Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
|
||||
certificate_arn = (
|
||||
f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{str(uuid.uuid4())}"
|
||||
)
|
||||
certificate_name = "test-certificate.com"
|
||||
certificate_type = "AMAZON_ISSUED"
|
||||
|
||||
|
||||
def mock_make_api_call(self, operation_name, kwargs):
|
||||
"""
|
||||
As you can see the operation_name has the list_analyzers snake_case form but
|
||||
we are using the ListAnalyzers form.
|
||||
Rationale -> https://github.com/boto/botocore/blob/develop/botocore/client.py#L810:L816
|
||||
|
||||
We have to mock every AWS API call using Boto3
|
||||
"""
|
||||
if operation_name == "ListCertificates":
|
||||
return {
|
||||
"CertificateSummaryList": [
|
||||
{
|
||||
"CertificateArn": certificate_arn,
|
||||
"DomainName": certificate_name,
|
||||
"SubjectAlternativeNameSummaries": [
|
||||
"test-certificate-2.com",
|
||||
],
|
||||
"HasAdditionalSubjectAlternativeNames": False,
|
||||
"Status": "ISSUED",
|
||||
"Type": certificate_type,
|
||||
"KeyAlgorithm": "RSA_4096",
|
||||
"KeyUsages": ["DIGITAL_SIGNATURE"],
|
||||
"ExtendedKeyUsages": ["TLS_WEB_SERVER_AUTHENTICATION"],
|
||||
"InUse": True,
|
||||
"Exported": False,
|
||||
"RenewalEligibility": "ELIGIBLE",
|
||||
"NotBefore": datetime(2024, 1, 1),
|
||||
"NotAfter": datetime(2024, 1, 1),
|
||||
"CreatedAt": datetime(2024, 1, 1),
|
||||
"IssuedAt": datetime(2024, 1, 1),
|
||||
"ImportedAt": datetime(2024, 1, 1),
|
||||
"RevokedAt": datetime(2024, 1, 1),
|
||||
}
|
||||
]
|
||||
}
|
||||
if operation_name == "DescribeCertificate":
|
||||
if kwargs["CertificateArn"] == certificate_arn:
|
||||
return {
|
||||
"Certificate": {
|
||||
"Options": {"CertificateTransparencyLoggingPreference": "DISABLED"},
|
||||
}
|
||||
}
|
||||
return make_api_call(self, operation_name, kwargs)
|
||||
|
||||
|
||||
# Mock generate_regional_clients()
|
||||
def mock_generate_regional_clients(service, audit_info):
|
||||
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
|
||||
regional_client.region = AWS_REGION
|
||||
return {AWS_REGION: regional_client}
|
||||
|
||||
|
||||

# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
@patch(
    "prowler.providers.aws.services.acm.acm_service.generate_regional_clients",
    new=mock_generate_regional_clients,
)
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
# Freeze time
@freeze_time("2023-01-01")
# FIXME: Pending Moto PR to update ACM responses
class Test_ACM_Service:
    # Mocked Audit Info
    def set_mocked_audit_info(self):
@@ -33,7 +111,7 @@ class Test_ACM_Service:
        return audit_info

    # Test ACM Service
    @mock_acm
    # @mock_acm
    def test_service(self):
        # ACM client for this test class
        audit_info = self.set_mocked_audit_info()
@@ -41,7 +119,7 @@ class Test_ACM_Service:
        assert acm.service == "acm"

    # Test ACM Client
    @mock_acm
    # @mock_acm
    def test_client(self):
        # ACM client for this test class
        audit_info = self.set_mocked_audit_info()
@@ -50,7 +128,7 @@ class Test_ACM_Service:
        assert regional_client.__class__.__name__ == "ACM"

    # Test ACM Session
    @mock_acm
    # @mock_acm
    def test__get_session__(self):
        # ACM client for this test class
        audit_info = self.set_mocked_audit_info()
@@ -58,7 +136,7 @@ class Test_ACM_Service:
        assert acm.session.__class__.__name__ == "Session"

    # Test ACM Session
    @mock_acm
    # @mock_acm
    def test_audited_account(self):
        # ACM client for this test class
        audit_info = self.set_mocked_audit_info()
@@ -66,31 +144,22 @@ class Test_ACM_Service:
        assert acm.audited_account == AWS_ACCOUNT_NUMBER

    # Test ACM List Certificates
    @mock_acm
    def test__list_certificates__(self):
    # @mock_acm
    def test__list_and_describe_certificates__(self):
        # Generate ACM Client
        acm_client = client("acm", region_name=AWS_REGION)
        # acm_client = client("acm", region_name=AWS_REGION)
        # Request ACM certificate
        certificate = acm_client.request_certificate(
            DomainName="test.com",
        )
        # certificate = acm_client.request_certificate(
        # DomainName="test.com",
        # )

        # ACM client for this test class
        audit_info = self.set_mocked_audit_info()
        acm = ACM(audit_info)
        assert len(acm.certificates) == 1
        assert acm.certificates[0].arn == certificate["CertificateArn"]

    # Test ACM Describe Certificates
    @mock_acm
    def test__describe_certificates__(self):
        # Generate ACM Client
        acm_client = client("acm", region_name=AWS_REGION)
        # Request ACM certificate
        certificate = acm_client.request_certificate(
            DomainName="test.com",
        )
        # ACM client for this test class
        audit_info = self.set_mocked_audit_info()
        acm = ACM(audit_info)
        assert acm.certificates[0].type == "AMAZON_ISSUED"
        assert acm.certificates[0].arn == certificate["CertificateArn"]
        assert acm.certificates[0].arn == certificate_arn
        assert acm.certificates[0].name == certificate_name
        assert acm.certificates[0].type == certificate_type
        assert acm.certificates[0].expiration_days == 365
        assert acm.certificates[0].transparency_logging is False
        assert acm.certificates[0].region == AWS_REGION

@@ -148,6 +148,7 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:
        from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
            Cloudtrail,
        )
        from prowler.providers.aws.services.s3.s3_service import S3

        current_audit_info.audited_partition = "aws"

@@ -155,19 +156,71 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:
            "prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_client",
            new=Cloudtrail(current_audit_info),
        ):
            # Test Check
            from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
                cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
            )
            with mock.patch(
                "prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.s3_client",
                new=S3(current_audit_info),
            ):
                # Test Check
                from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
                    cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
                )

            check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
            result = check.execute()
                check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
                result = check.execute()

            assert len(result) == 1
            assert result[0].status == "PASS"
            assert result[0].resource_id == trail_name_us
            assert result[0].resource_arn == trail_us["TrailARN"]
            assert search(
                result[0].status_extended,
                f"S3 Bucket {bucket_name_us} from single region trail {trail_name_us} is not publicly accessible",
            )
                assert len(result) == 1
                assert result[0].status == "PASS"
                assert result[0].resource_id == trail_name_us
                assert result[0].resource_arn == trail_us["TrailARN"]
                assert search(
                    result[0].status_extended,
                    f"S3 Bucket {bucket_name_us} from single region trail {trail_name_us} is not publicly accessible",
                )
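
# Hedged aside (not part of this diff): the nested mock.patch blocks above can
# also be stacked in a single with-statement, which keeps the check import and
# the assertions at one indentation level. A minimal, self-contained
# illustration of that pattern with arbitrary stand-in targets (json.dumps and
# json.loads are used here purely as examples, not Prowler code):
import json
from unittest import mock

with mock.patch("json.dumps", return_value="{}"), mock.patch(
    "json.loads", return_value={}
):
    assert json.dumps({"a": 1}) == "{}"
    assert json.loads("anything") == {}
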

    @mock_cloudtrail
    @mock_s3
    def test_trail_bucket_cross_account(self):
        cloudtrail_client = client("cloudtrail", region_name="us-east-1")
        s3_client = client("s3", region_name="us-east-1")
        trail_name_us = "trail_test_us"
        bucket_name_us = "bucket_test_us"
        s3_client.create_bucket(Bucket=bucket_name_us)
        trail_us = cloudtrail_client.create_trail(
            Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
        )

        from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
        from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
            Cloudtrail,
        )
        from prowler.providers.aws.services.s3.s3_service import S3

        current_audit_info.audited_partition = "aws"

        with mock.patch(
            "prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_client",
            new=Cloudtrail(current_audit_info),
        ):
            with mock.patch(
                "prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.s3_client",
                new=S3(current_audit_info),
            ) as s3_client:
                # Test Check
                from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
                    cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
                )

                # Empty s3 buckets to simulate the bucket is in another account
                s3_client.buckets = []

                check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
                result = check.execute()

                assert len(result) == 1
                assert result[0].status == "PASS"
                assert result[0].resource_id == trail_name_us
                assert result[0].resource_arn == trail_us["TrailARN"]
                assert search(
                    "is a cross-account bucket in another account out of Prowler's permissions scope",
                    result[0].status_extended,
                )

@@ -16,6 +16,7 @@ class Test_trustedadvisor_errors_and_warnings:
        trustedadvisor_client.checks = []
        trustedadvisor_client.enabled = False
        trustedadvisor_client.account = AWS_ACCOUNT_NUMBER
        trustedadvisor_client.region = AWS_REGION
        with mock.patch(
            "prowler.providers.aws.services.trustedadvisor.trustedadvisor_service.TrustedAdvisor",
            trustedadvisor_client,

@@ -2,7 +2,7 @@ import toml

data = toml.load("pyproject.toml")
# Modify field
data["project"]["name"] = "prowler"
data["tool"]["poetry"]["name"] = "prowler-cloud"

# To use the dump function, you need to open the file in 'write' mode
f = open("pyproject.toml", "w")
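
# Hedged completion sketch (not part of this diff): the hunk above ends before the
# modified data is written back. With the `data` and `f` objects from the lines
# above, the usual finish is to dump the mapping and close the handle:
toml.dump(data, f)
f.close()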