Compare commits

..

46 Commits
3.2.0 ... 3.2.2

Author SHA1 Message Date
github-actions
bffe2a2c63 chore(release): 3.2.2 2023-02-23 12:34:11 +00:00
Sergio Garcia
849b703828 chore(resource-based scan): execute only applicable checks (#1934) 2023-02-23 13:30:21 +01:00
Sergio Garcia
4b935a40b6 fix(metadata): remove us-east-1 in remediation (#1958) 2023-02-23 13:19:10 +01:00
Sergio Garcia
5873a23ccb fix(key errors): solver EMR and IAM errrors (#1957) 2023-02-23 13:15:00 +01:00
Nacho Rivera
eae2786825 fix(cloudtrail): Handle when the CloudTrail bucket is in another account (#1956) 2023-02-23 13:04:32 +01:00
github-actions[bot]
6407386de5 chore(regions_update): Changes in regions for AWS services. (#1952)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-23 12:24:36 +01:00
Sergio Garcia
3fe950723f fix(actions): add README to docker action and filter steps for releases (#1955) 2023-02-23 12:22:41 +01:00
Sergio Garcia
52bf6acd46 chore(regions): add secret token to avoid stuck checks (#1954) 2023-02-23 12:11:54 +01:00
Sergio Garcia
9590e7d7e0 chore(poetry): make python-poetry as packaging and dependency manager (#1935)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-02-23 11:50:29 +01:00
github-actions[bot]
7a08140a2d chore(regions_update): Changes in regions for AWS services. (#1950)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-23 08:42:36 +01:00
dependabot[bot]
d1491cfbd1 build(deps): bump boto3 from 1.26.74 to 1.26.76 (#1948)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-22 08:01:13 +01:00
dependabot[bot]
695b80549d build(deps): bump botocore from 1.29.75 to 1.29.76 (#1946)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-22 07:50:39 +01:00
Sergio Garcia
11c60a637f release: 3.2.1 (#1945) 2023-02-21 17:22:02 +01:00
Sergio Garcia
844ad70bb9 fix(cloudwatch): allow " in regex patterns (#1943) 2023-02-21 16:46:23 +01:00
Sergio Garcia
5ac7cde577 chore(iam_disable_N_days_credentials): improve checks logic (#1923) 2023-02-21 15:20:33 +01:00
Sergio Garcia
ce3ef0550f chore(Security Hub): add status extended to Security Hub (#1921) 2023-02-21 15:11:43 +01:00
Sergio Garcia
813f3e7d42 fix(errors): handle errors when S3 buckets or EC2 instances are deleted (#1942) 2023-02-21 12:31:23 +01:00
Sergio Garcia
d03f97af6b fix(regions): add unique branch name (#1941) 2023-02-21 11:53:36 +01:00
github-actions[bot]
019ab0286d chore(regions_update): Changes in regions for AWS services. (#1940)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-21 11:47:03 +01:00
Fennerr
c6647b4706 chore(secrets): Improve the status_extended with more information (#1937)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-21 11:37:20 +01:00
Sergio Garcia
f913536d88 fix(services): solve errors in EMR, RDS, S3 and VPC services (#1913) 2023-02-21 11:11:39 +01:00
dependabot[bot]
640d1bd176 build(deps-dev): bump moto from 4.1.2 to 4.1.3 (#1939)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-21 07:48:08 +01:00
dependabot[bot]
66baccf528 build(deps): bump botocore from 1.29.74 to 1.29.75 (#1938)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-21 07:32:44 +01:00
Sergio Garcia
6e6dacbace chore(security hub): add --skip-sh-update (#1911) 2023-02-20 09:58:00 +01:00
dependabot[bot]
cdbb10fb26 build(deps): bump boto3 from 1.26.72 to 1.26.74 (#1933)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-20 07:56:40 +01:00
dependabot[bot]
c34ba3918c build(deps): bump botocore from 1.29.73 to 1.29.74 (#1932)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-20 07:34:20 +01:00
Fennerr
fa228c876c fix(iam_rotate_access_key_90_days): check only active access keys (#1929)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-17 12:53:28 +01:00
dependabot[bot]
2f4d0af7d7 build(deps): bump botocore from 1.29.72 to 1.29.73 (#1926)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-17 12:14:23 +01:00
github-actions[bot]
2d3e5235a9 chore(regions_update): Changes in regions for AWS services. (#1927)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-17 11:13:13 +01:00
dependabot[bot]
8e91ccaa54 build(deps): bump boto3 from 1.26.71 to 1.26.72 (#1925)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-17 10:56:19 +01:00
Fennerr
6955658b36 fix(quick_inventory): handle ApiGateway resources (#1924)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-16 18:29:23 +01:00
Fennerr
dbb44401fd fix(ecs_task_definitions_no_environment_secrets): dump_env_vars is reintialised (#1922) 2023-02-16 15:59:53 +01:00
dependabot[bot]
b42ed70c84 build(deps): bump botocore from 1.29.71 to 1.29.72 (#1919)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-16 14:21:46 +01:00
dependabot[bot]
a28276d823 build(deps): bump pydantic from 1.10.4 to 1.10.5 (#1918)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-16 13:51:37 +01:00
Pepe Fagoaga
fa4b27dd0e fix(compliance): Set Version as optional and fix list (#1899)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-16 12:47:39 +01:00
dependabot[bot]
0be44d5c49 build(deps): bump boto3 from 1.26.70 to 1.26.71 (#1920)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-16 12:38:10 +01:00
github-actions[bot]
2514596276 chore(regions_update): Changes in regions for AWS services. (#1910)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-16 11:56:10 +01:00
dependabot[bot]
7008d2a953 build(deps): bump botocore from 1.29.70 to 1.29.71 (#1909)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-15 07:39:16 +01:00
dependabot[bot]
2539fedfc4 build(deps): bump boto3 from 1.26.69 to 1.26.70 (#1908)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-15 07:12:18 +01:00
Ignacio Dominguez
b453df7591 fix(iam-credentials-expiration): IAM password policy expires passwords fix (#1903)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-02-14 13:54:58 +01:00
Pepe Fagoaga
9e5d5edcba fix(codebuild): Handle endTime in builds (#1900) 2023-02-14 11:27:53 +01:00
Nacho Rivera
2d5de6ff99 fix(cross account): cloudtrail s3 bucket logging (#1902) 2023-02-14 11:23:31 +01:00
github-actions[bot]
259e9f1c17 chore(regions_update): Changes in regions for AWS services. (#1901)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-14 10:28:04 +01:00
dependabot[bot]
daeb53009e build(deps): bump botocore from 1.29.69 to 1.29.70 (#1898)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-14 08:27:14 +01:00
dependabot[bot]
f12d271ca5 build(deps): bump boto3 from 1.26.51 to 1.26.69 (#1897)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-14 07:55:26 +01:00
dependabot[bot]
965185ca3b build(deps-dev): bump pylint from 2.16.1 to 2.16.2 (#1896) 2023-02-14 07:35:29 +01:00
94 changed files with 4963 additions and 2159 deletions

View File

@@ -47,9 +47,25 @@ jobs:
container-build:
# needs: dockerfile-linter
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
steps:
- name: Checkout
uses: actions/checkout@v3
- name: setup python (release)
if: github.event_name == 'release'
uses: actions/setup-python@v2
with:
python-version: 3.9 #install the python needed
- name: Install dependencies (release)
if: github.event_name == 'release'
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Update Prowler version (release)
if: github.event_name == 'release'
run: |
poetry version ${{ github.event.release.tag_name }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Build

View File

@@ -24,9 +24,9 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pipenv
pipenv install --dev
pipenv run pip list
pip install poetry
poetry install
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"v([^"]+)".*/\1/' \
@@ -34,25 +34,25 @@ jobs:
&& chmod +x /tmp/hadolint
- name: Lint with flake8
run: |
pipenv run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
- name: Checking format with black
run: |
pipenv run black --check .
poetry run black --check .
- name: Lint with pylint
run: |
pipenv run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
- name: Bandit
run: |
pipenv run bandit -q -lll -x '*_test.py,./contrib/' -r .
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
run: |
pipenv run safety check
poetry run safety check
- name: Vulture
run: |
pipenv run vulture --exclude "contrib" --min-confidence 100 .
poetry run vulture --exclude "contrib" --min-confidence 100 .
- name: Hadolint
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
run: |
pipenv run pytest tests -n auto
poetry run pytest tests -n auto

View File

@@ -5,11 +5,14 @@ on:
types: [published]
env:
GITHUB_BRANCH: ${{ github.event.release.tag_name }}
RELEASE_TAG: ${{ github.event.release.tag_name }}
GITHUB_BRANCH: master
jobs:
release-prowler-job:
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
name: Release Prowler to PyPI
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
@@ -22,20 +25,45 @@ jobs:
python-version: 3.9 #install the python needed
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build toml --upgrade
- name: Build package
run: python -m build
- name: Publish prowler-cloud package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_API_TOKEN }}
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Change version and Build package
run: |
poetry version ${{ env.RELEASE_TAG }}
git config user.name "github-actions"
git config user.email "<noreply@github.com>"
git add prowler/config/config.py pyproject.toml
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}"
git push -f origin ${{ env.RELEASE_TAG }}
poetry build
- name: Publish prowler package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
- name: Replicate PyPi Package
run: |
rm -rf ./dist && rm -rf ./build && rm -rf prowler_cloud.egg-info
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
python util/replicate_pypi_package.py
python -m build
- name: Publish prowler package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
poetry build
- name: Publish prowler-cloud package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
# Create pull request with new version
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
password: ${{ secrets.PYPI_API_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
branch: release-${{ env.RELEASE_TAG }}
labels: "status/waiting-for-revision, severity/low"
title: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}"
body: |
### Description
This PR updates Prowler Version to ${{ env.RELEASE_TAG }}.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

View File

@@ -52,9 +52,9 @@ jobs:
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated"
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low"
title: "chore(regions_update): Changes in regions for AWS services."
body: |

View File

@@ -13,6 +13,13 @@ repos:
- id: pretty-format-json
args: ["--autofix", --no-sort-keys, --no-ensure-ascii]
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.7.0
hooks:
- id: pretty-format-toml
args: [--autofix]
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.9.0

23
.readthedocs.yaml Normal file
View File

@@ -0,0 +1,23 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.9"
jobs:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- pip install poetry
# Tell poetry to not use a virtual environment
- poetry config virtualenvs.create false
post_install:
- poetry install -E docs
mkdocs:
configuration: mkdocs.yml

View File

@@ -16,6 +16,7 @@ USER prowler
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler
# Install dependencies
ENV HOME='/home/prowler'
@@ -26,7 +27,7 @@ RUN pip install --no-cache-dir --upgrade pip && \
# Remove Prowler directory and build files
USER 0
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/build /home/prowler/prowler_cloud.egg-info
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
USER prowler
ENTRYPOINT ["prowler"]

View File

@@ -24,11 +24,11 @@ lint: ## Lint Code
##@ PyPI
pypi-clean: ## Delete the distribution files
rm -rf ./dist && rm -rf ./build && rm -rf prowler_cloud.egg-info
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
pypi-build: ## Build package
$(MAKE) pypi-clean && \
python3 -m build
poetry build
pypi-upload: ## Upload package
python3 -m twine upload --repository pypi dist/*

42
Pipfile
View File

@@ -1,42 +0,0 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
colorama = "0.4.4"
boto3 = "1.26.3"
arnparse = "0.0.2"
botocore = "1.29.69"
pydantic = "1.9.1"
schema = "0.7.5"
shodan = "1.28.0"
detect-secrets = "1.4.0"
alive-progress = "2.4.1"
tabulate = "0.9.0"
azure-identity = "1.12.0"
azure-storage-blob = "12.14.1"
msgraph-core = "0.2.2"
azure-mgmt-subscription = "3.1.1"
azure-mgmt-authorization = "3.0.0"
azure-mgmt-security = "3.0.0"
azure-mgmt-storage = "21.0.0"
[dev-packages]
black = "22.10.0"
pylint = "2.16.1"
flake8 = "5.0.4"
bandit = "1.7.4"
safety = "2.3.1"
vulture = "2.7"
moto = "4.1.2"
docker = "6.0.0"
openapi-spec-validator = "0.5.5"
pytest = "7.2.1"
pytest-xdist = "3.2.0"
coverage = "7.1.0"
sure = "2.0.1"
freezegun = "1.2.1"
[requires]
python_version = "3.9"

1703
Pipfile.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -64,13 +64,13 @@ The container images are available here:
## From Github
Python >= 3.9 is required with pip and pipenv:
Python >= 3.9 is required with pip and poetry:
```
git clone https://github.com/prowler-cloud/prowler
cd prowler
pipenv shell
pipenv install
poetry shell
poetry install
python prowler.py -v
```

View File

@@ -5,7 +5,7 @@
# Prowler Documentation
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cloud/prowler/tree/2.12.0) to the branch and its README.md.
@@ -118,7 +118,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
./configure --enable-optimizations
sudo make altinstall
python3.9 --version
cd
cd
```
_Commands_:

View File

@@ -36,3 +36,12 @@ or for only one filtered region like eu-west-1:
Once you run findings for first time you will be able to see Prowler findings in Findings section:
![Screenshot 2020-10-29 at 10 29 05 PM](https://user-images.githubusercontent.com/3985464/97634676-66c9f600-1a36-11eb-9341-70feb06f6331.png)
## Skip sending updates of findings to Security Hub
By default, Prowler archives all its findings in Security Hub that have not appeared in the last scan.
You can skip this logic by using the option `--skip-sh-update` so Prowler will not archive older findings:
```sh
./prowler -S --skip-sh-update
```

2516
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -11,6 +11,7 @@ from prowler.lib.check.check import (
exclude_services_to_run,
execute_checks,
get_checks_from_input_arn,
get_regions_from_audit_resources,
list_categories,
list_services,
print_categories,
@@ -136,6 +137,9 @@ def prowler():
# Once the audit_info is set and we have the eventual checks from arn, it is time to exclude the others
if audit_info.audit_resources:
audit_info.audited_regions = get_regions_from_audit_resources(
audit_info.audit_resources
)
checks_to_execute = get_checks_from_input_arn(
audit_info.audit_resources, provider
)
@@ -203,7 +207,7 @@ def prowler():
)
# Resolve previous fails of Security Hub
if provider == "aws" and args.security_hub:
if provider == "aws" and args.security_hub and not args.skip_sh_update:
resolve_security_hub_previous_findings(args.output_directory, audit_info)
# Display summary table
@@ -216,14 +220,15 @@ def prowler():
)
if compliance_framework and findings:
# Display compliance table
display_compliance_table(
findings,
bulk_checks_metadata,
compliance_framework,
audit_output_options.output_filename,
audit_output_options.output_directory,
)
for compliance in compliance_framework:
# Display compliance table
display_compliance_table(
findings,
bulk_checks_metadata,
compliance,
audit_output_options.output_filename,
audit_output_options.output_directory,
)
# If there are failed findings exit code 3, except if -z is input
if not args.ignore_exit_code_3 and stats["total_fail"] > 0:

View File

@@ -9,7 +9,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.2.0"
prowler_version = "3.2.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"

View File

@@ -179,17 +179,18 @@ def print_compliance_requirements(
bulk_compliance_frameworks: dict, compliance_frameworks: list
):
for compliance_framework in compliance_frameworks:
for compliance in bulk_compliance_frameworks.values():
# Workaround until we have more Compliance Frameworks
split_compliance = compliance_framework.split("_")
framework = split_compliance[0].upper()
version = split_compliance[1].upper()
provider = split_compliance[2].upper()
if framework in compliance.Framework and compliance.Version == version:
for key in bulk_compliance_frameworks.keys():
framework = bulk_compliance_frameworks[key].Framework
provider = bulk_compliance_frameworks[key].Provider
version = bulk_compliance_frameworks[key].Version
requirements = bulk_compliance_frameworks[key].Requirements
# We can list the compliance requirements for a given framework using the
# bulk_compliance_frameworks keys since they are the compliance specification file name
if compliance_framework == key:
print(
f"Listing {framework} {version} {provider} Compliance Requirements:\n"
)
for requirement in compliance.Requirements:
for requirement in requirements:
checks = ""
for check in requirement.Checks:
checks += f" {Fore.YELLOW}\t\t{check}\n{Style.RESET_ALL}"
@@ -510,19 +511,61 @@ def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
checks_from_arn = set()
# Handle if there are audit resources so only their services are executed
if audit_resources:
service_list = []
services_without_subservices = ["guardduty", "kms", "s3", "elb"]
service_list = set()
sub_service_list = set()
for resource in audit_resources:
service = resource.split(":")[2]
# Parse services when they are different in the ARNs
if service == "lambda":
service = "awslambda"
if service == "elasticloadbalancing":
service = "elb"
elif service == "logs":
service = "cloudwatch"
service_list.append(service)
sub_service = resource.split(":")[5].split("/")[0].replace("-", "_")
checks_from_arn = recover_checks_from_service(service_list, provider)
if (
service != "wafv2" and service != "waf"
): # WAF Services does not have checks
# Parse services when they are different in the ARNs
if service == "lambda":
service = "awslambda"
if service == "elasticloadbalancing":
service = "elb"
elif service == "logs":
service = "cloudwatch"
service_list.add(service)
# Get subservices to execute only applicable checks
if service not in services_without_subservices:
# Parse some specific subservices
if service == "ec2":
if sub_service == "security_group":
sub_service = "securitygroup"
if sub_service == "network_acl":
sub_service = "networkacl"
if sub_service == "image":
sub_service = "ami"
if service == "rds":
if sub_service == "cluster_snapshot":
sub_service = "snapshot"
sub_service_list.add(sub_service)
else:
sub_service_list.add(service)
checks = recover_checks_from_service(service_list, provider)
# Filter only checks with audited subservices
for check in checks:
if any(sub_service in check for sub_service in sub_service_list):
if not (sub_service == "policy" and "password_policy" in check):
checks_from_arn.add(check)
# Return final checks list
return checks_from_arn
return sorted(checks_from_arn)
def get_regions_from_audit_resources(audit_resources: list) -> list:
"""get_regions_from_audit_resources gets the regions from the audit resources arns"""
audited_regions = []
for resource in audit_resources:
region = resource.split(":")[3]
if region and region not in audited_regions: # Check if arn has a region
audited_regions.append(region)
if audited_regions:
return audited_regions
return None

View File

@@ -2,7 +2,7 @@ import sys
from enum import Enum
from typing import Optional, Union
from pydantic import BaseModel, ValidationError
from pydantic import BaseModel, ValidationError, root_validator
from prowler.lib.logger import logger
@@ -107,10 +107,21 @@ class Compliance_Base_Model(BaseModel):
Framework: str
Provider: str
Version: str
Version: Optional[str]
Description: str
Requirements: list[Compliance_Requirement]
@root_validator(pre=True)
# noqa: F841 - since vulture raises unused variable 'cls'
def framework_and_provider_must_not_be_empty(cls, values): # noqa: F841
framework, provider = (
values.get("Framework"),
values.get("Provider"),
)
if framework == "" or provider == "":
raise ValueError("Framework or Provider must not be empty")
return values
# Testing Pending
def load_compliance_framework(

View File

@@ -316,6 +316,11 @@ Detailed documentation at https://docs.prowler.cloud
action="store_true",
help="Send check output to AWS Security Hub",
)
aws_security_hub_subparser.add_argument(
"--skip-sh-update",
action="store_true",
help="Skip updating previous findings of Prowler in Security Hub",
)
# AWS Quick Inventory
aws_quick_inventory_subparser = aws_parser.add_argument_group("Quick Inventory")
aws_quick_inventory_subparser.add_argument(

View File

@@ -4,7 +4,7 @@ from csv import DictWriter
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import timestamp, orange_color
from prowler.config.config import orange_color, timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
Check_Output_CSV_CIS,
@@ -167,7 +167,7 @@ def display_compliance_table(
output_directory: str,
):
try:
if "ens_rd2022_aws" in compliance_framework:
if "ens_rd2022_aws" == compliance_framework:
marcos = {}
ens_compliance_table = {
"Proveedor": [],
@@ -266,9 +266,9 @@ def display_compliance_table(
)
print(f"\nResultados detallados de {compliance_fm} en:")
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
elif "cis_1." in str(compliance_framework):
elif "cis_1." in compliance_framework:
sections = {}
cis_compliance_table = {
"Provider": [],
@@ -281,8 +281,9 @@ def display_compliance_table(
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if compliance.Framework == "CIS" and compliance.Version in str(
compliance_framework
if (
compliance.Framework == "CIS"
and compliance.Version in compliance_framework
):
compliance_version = compliance.Version
compliance_fm = compliance.Framework
@@ -360,12 +361,12 @@ def display_compliance_table(
)
print(f"\nDetailed results of {compliance_fm} are in:")
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
else:
print(f"\nDetailed results of {compliance_framework[0].upper()} are in:")
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
except Exception as error:
logger.critical(

View File

@@ -31,7 +31,7 @@ def fill_json_asff(finding_output, audit_info, finding):
) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
finding_output.Severity = Severity(Label=finding.check_metadata.Severity.upper())
finding_output.Title = finding.check_metadata.CheckTitle
finding_output.Description = finding.check_metadata.Description
finding_output.Description = finding.status_extended
finding_output.Resources = [
Resource(
Id=finding.resource_arn,

View File

@@ -1630,6 +1630,13 @@
]
}
},
"cloudtrail-data": {
"regions": {
"aws": [],
"aws-cn": [],
"aws-us-gov": []
}
},
"cloudwatch": {
"regions": {
"aws": [
@@ -2196,6 +2203,11 @@
"connectcases": {
"regions": {
"aws": [
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-west-2",
"us-east-1",
"us-west-2"
],
@@ -2572,17 +2584,17 @@
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-3",
"ap-southeast-1",
"ca-central-1",
"eu-south-2",
"eu-west-1",
"eu-west-3",
"us-east-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-4",
"eu-west-2",
"me-south-1",
"eu-central-2",
"sa-east-1",
"us-east-2",
"us-west-2",
@@ -2591,7 +2603,9 @@
"eu-central-1",
"eu-north-1",
"eu-south-1",
"eu-west-2",
"me-central-1",
"me-south-1",
"us-west-1"
],
"aws-cn": [
@@ -3003,6 +3017,7 @@
"regions": {
"aws": [
"ap-south-1",
"ap-south-2",
"ca-central-1",
"eu-west-1",
"eu-west-2",
@@ -3012,7 +3027,6 @@
"af-south-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"eu-central-2",
"eu-south-1",
"eu-south-2",
@@ -3020,6 +3034,7 @@
"me-central-1",
"sa-east-1",
"ap-east-1",
"ap-northeast-3",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
@@ -3029,8 +3044,8 @@
"us-west-2"
],
"aws-cn": [
"cn-north-1",
"cn-northwest-1"
"cn-northwest-1",
"cn-north-1"
],
"aws-us-gov": [
"us-gov-east-1",
@@ -3319,20 +3334,23 @@
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-west-2",
"us-west-1"
"us-west-1",
"us-west-2"
],
"aws-cn": [
"cn-northwest-1",
"cn-north-1"
],
"aws-us-gov": []
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"emr-serverless": {
@@ -4155,12 +4173,12 @@
"us-west-2",
"ap-east-1",
"ap-northeast-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-south-1",
"eu-west-1",
"me-central-1",
"me-south-1",
"us-east-2",
"ap-northeast-2",
@@ -4168,6 +4186,7 @@
"eu-central-2",
"eu-north-1",
"eu-west-3",
"me-central-1",
"us-west-1"
],
"aws-cn": [
@@ -4883,6 +4902,8 @@
"kendra-ranking": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
@@ -4953,14 +4974,15 @@
"eu-north-1",
"eu-south-1",
"eu-west-1",
"eu-west-3",
"me-central-1",
"us-east-1",
"us-west-1",
"af-south-1",
"ap-northeast-3",
"ap-south-1",
"eu-central-1",
"eu-west-2"
"eu-west-2",
"eu-west-3"
],
"aws-cn": [
"cn-north-1",
@@ -5043,8 +5065,8 @@
"regions": {
"aws": [
"af-south-1",
"ap-northeast-2",
"ap-southeast-1",
"ap-southeast-3",
"ca-central-1",
"eu-central-1",
"eu-north-1",
@@ -5053,14 +5075,15 @@
"us-west-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"me-south-1",
"us-west-2",
"ap-south-1",
"ap-southeast-2",
"eu-west-2",
"sa-east-1",
"us-east-2"
],
@@ -6823,13 +6846,14 @@
"us-west-1",
"ap-east-1",
"ap-northeast-1",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-west-2",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-west-2"
"us-west-2",
"eu-central-1"
],
"aws-cn": [
"cn-north-1",
@@ -7010,13 +7034,15 @@
"ap-east-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-west-3",
"me-central-1",
"sa-east-1",
"us-west-1",
"ap-northeast-1",
"ap-south-1",
"eu-central-1",
"eu-south-2",
"eu-west-2",
"us-east-1",
"us-east-2"
@@ -8046,6 +8072,22 @@
"aws-us-gov": []
}
},
"simpledb": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-southeast-1",
"ap-southeast-2",
"eu-west-1",
"sa-east-1",
"us-east-1",
"us-west-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"simspaceweaver": {
"regions": {
"aws": [

View File

@@ -50,7 +50,6 @@ def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
or region == "us-gov-west-1"
or region == "cn-north-1"
):
get_roles_paginator = iam_client.get_paginator("list_roles")
for page in get_roles_paginator.paginate():
for role in page["Roles"]:
@@ -117,7 +116,6 @@ def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
def create_inventory_table(resources: list) -> dict:
services = {}
# { "S3":
# 123,
@@ -143,6 +141,14 @@ def create_inventory_table(resources: list) -> dict:
resource_type = "topic"
elif service == "sqs":
resource_type = "queue"
elif service == "apigateway":
split_parts = resource.split(":")[5].split("/")
if "integration" in split_parts and "responses" in split_parts:
resource_type = "restapis-resources-methods-integration-response"
elif "documentation" in split_parts and "parts" in split_parts:
resource_type = "restapis-documentation-parts"
else:
resource_type = resource.split(":")[5].split("/")[1]
else:
resource_type = resource.split(":")[5].split("/")[0]
if service not in resources_type:
@@ -171,7 +177,6 @@ def create_inventory_table(resources: list) -> dict:
def create_output(resources: list, audit_info: AWS_Audit_Info, output_directory: str):
json_output = []
output_file = f"{output_directory}/prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"

View File

@@ -26,15 +26,40 @@ class awslambda_function_no_secrets_in_code(Check):
function.code.code_zip.extractall(tmp_dir_name)
# List all files
files_in_zip = next(os.walk(tmp_dir_name))[2]
secrets_findings = []
for file in files_in_zip:
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(f"{tmp_dir_name}/{file}")
detect_secrets_output = secrets.json()
if detect_secrets_output:
for (
file_name
) in (
detect_secrets_output.keys()
): # Appears that only 1 file is being scanned at a time, so could rework this
output_file_name = file_name.replace(
f"{tmp_dir_name}/", ""
)
secrets_string = ", ".join(
[
f"{secret['type']} on line {secret['line_number']}"
for secret in detect_secrets_output[file_name]
]
)
secrets_findings.append(
f"{output_file_name}: {secrets_string}"
)
if secrets.json():
report.status = "FAIL"
report.status_extended = f"Potential secret found in Lambda function {function.name} code"
break
if secrets_findings:
final_output_string = "; ".join(secrets_findings)
report.status = "FAIL"
# report.status_extended = f"Potential {'secrets' if len(secrets_findings)>1 else 'secret'} found in Lambda function {function.name} code. {final_output_string}"
if len(secrets_findings) > 1:
report.status_extended = f"Potential secrets found in Lambda function {function.name} code -> {final_output_string}"
else:
report.status_extended = f"Potential secret found in Lambda function {function.name} code -> {final_output_string}"
# break // Don't break as there may be additional findings
findings.append(report)

View File

@@ -27,7 +27,8 @@ class awslambda_function_no_secrets_in_variables(Check):
temp_env_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_env_data_file.write(
bytes(
json.dumps(function.environment), encoding="raw_unicode_escape"
json.dumps(function.environment, indent=2),
encoding="raw_unicode_escape",
)
)
temp_env_data_file.close()
@@ -35,9 +36,17 @@ class awslambda_function_no_secrets_in_variables(Check):
with default_settings():
secrets.scan_file(temp_env_data_file.name)
if secrets.json():
detect_secrets_output = secrets.json()
if detect_secrets_output:
environment_variable_names = list(function.environment.keys())
secrets_string = ", ".join(
[
f"{secret['type']} in variable {environment_variable_names[int(secret['line_number'])-2]}"
for secret in detect_secrets_output[temp_env_data_file.name]
]
)
report.status = "FAIL"
report.status_extended = f"Potential secret found in Lambda function {function.name} variables"
report.status_extended = f"Potential secret found in Lambda function {function.name} variables -> {secrets_string}"
os.remove(temp_env_data_file.name)

View File

@@ -13,7 +13,7 @@
"RelatedUrl": "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-protect-stacks.html",
"Remediation": {
"Code": {
"CLI": "aws cloudformation update-termination-protection --region us-east-1 --stack-name <STACK_NAME> --enable-termination-protection",
"CLI": "aws cloudformation update-termination-protection --region <REGION_NAME> --stack-name <STACK_NAME> --enable-termination-protection",
"NativeIaC": "",
"Other": "",
"Terraform": ""

View File

@@ -10,6 +10,7 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
@@ -21,13 +22,19 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
else:
report.status_extended = f"Single region Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}"
for bucket in s3_client.buckets:
if trail_bucket == bucket.name and bucket.logging:
report.status = "PASS"
if trail.is_multiregion:
report.status_extended = f"Multiregion trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
else:
report.status_extended = f"Single region trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.logging:
report.status = "PASS"
if trail.is_multiregion:
report.status_extended = f"Multiregion trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
else:
report.status_extended = f"Single region trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
break
# check if trail is delivering logs in a cross account bucket
if not trail_bucket_is_in_account:
report.status_extended = f"Trail {trail.name} is delivering logs in a cross-account bucket {trail_bucket} in another account out of Prowler's permissions scope, please check it manually"
findings.append(report)
return findings

View File

@@ -10,6 +10,7 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
@@ -23,19 +24,23 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
for bucket in s3_client.buckets:
# Here we need to ensure that acl_grantee is filled since if we don't have permissions to query the api for a concrete region
# (for example due to a SCP) we are going to try access an attribute from a None type
if trail_bucket == bucket.name and bucket.acl_grantees:
for grant in bucket.acl_grantees:
if (
grant.URI
== "http://acs.amazonaws.com/groups/global/AllUsers"
):
report.status = "FAIL"
if trail.is_multiregion:
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible"
else:
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible"
break
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.acl_grantees:
for grant in bucket.acl_grantees:
if (
grant.URI
== "http://acs.amazonaws.com/groups/global/AllUsers"
):
report.status = "FAIL"
if trail.is_multiregion:
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible"
else:
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible"
break
# check if trail bucket is a cross account bucket
if not trail_bucket_is_in_account:
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually"
findings.append(report)
return findings

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_network_acls_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateNetworkAcl.+\$\.eventName\s*=\s*CreateNetworkAclEntry.+\$\.eventName\s*=\s*DeleteNetworkAcl.+\$\.eventName\s*=\s*DeleteNetworkAclEntry.+\$\.eventName\s*=\s*ReplaceNetworkAclEntry.+\$\.eventName\s*=\s*ReplaceNetworkAclAssociation"
pattern = r"\$\.eventName\s*=\s*.?CreateNetworkAcl.+\$\.eventName\s*=\s*.?CreateNetworkAclEntry.+\$\.eventName\s*=\s*.?DeleteNetworkAcl.+\$\.eventName\s*=\s*.?DeleteNetworkAclEntry.+\$\.eventName\s*=\s*.?ReplaceNetworkAclEntry.+\$\.eventName\s*=\s*.?ReplaceNetworkAclAssociation.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_network_gateways_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateCustomerGateway.+\$\.eventName\s*=\s*DeleteCustomerGateway.+\$\.eventName\s*=\s*AttachInternetGateway.+\$\.eventName\s*=\s*CreateInternetGateway.+\$\.eventName\s*=\s*DeleteInternetGateway.+\$\.eventName\s*=\s*DetachInternetGateway"
pattern = r"\$\.eventName\s*=\s*.?CreateCustomerGateway.+\$\.eventName\s*=\s*.?DeleteCustomerGateway.+\$\.eventName\s*=\s*.?AttachInternetGateway.+\$\.eventName\s*=\s*.?CreateInternetGateway.+\$\.eventName\s*=\s*.?DeleteInternetGateway.+\$\.eventName\s*=\s*.?DetachInternetGateway.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_network_route_tables_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateRoute.+\$\.eventName\s*=\s*CreateRouteTable.+\$\.eventName\s*=\s*ReplaceRoute.+\$\.eventName\s*=\s*ReplaceRouteTableAssociation.+\$\.eventName\s*=\s*DeleteRouteTable.+\$\.eventName\s*=\s*DeleteRoute.+\$\.eventName\s*=\s*DisassociateRouteTable"
pattern = r"\$\.eventName\s*=\s*.?CreateRoute.+\$\.eventName\s*=\s*.?CreateRouteTable.+\$\.eventName\s*=\s*.?ReplaceRoute.+\$\.eventName\s*=\s*.?ReplaceRouteTableAssociation.+\$\.eventName\s*=\s*.?DeleteRouteTable.+\$\.eventName\s*=\s*.?DeleteRoute.+\$\.eventName\s*=\s*.?DisassociateRouteTable.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_vpcs_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateVpc.+\$\.eventName\s*=\s*DeleteVpc.+\$\.eventName\s*=\s*ModifyVpcAttribute.+\$\.eventName\s*=\s*AcceptVpcPeeringConnection.+\$\.eventName\s*=\s*CreateVpcPeeringConnection.+\$\.eventName\s*=\s*DeleteVpcPeeringConnection.+\$\.eventName\s*=\s*RejectVpcPeeringConnection.+\$\.eventName\s*=\s*AttachClassicLinkVpc.+\$\.eventName\s*=\s*DetachClassicLinkVpc.+\$\.eventName\s*=\s*DisableVpcClassicLink.+\$\.eventName\s*=\s*EnableVpcClassicLink"
pattern = r"\$\.eventName\s*=\s*.?CreateVpc.+\$\.eventName\s*=\s*.?DeleteVpc.+\$\.eventName\s*=\s*.?ModifyVpcAttribute.+\$\.eventName\s*=\s*.?AcceptVpcPeeringConnection.+\$\.eventName\s*=\s*.?CreateVpcPeeringConnection.+\$\.eventName\s*=\s*.?DeleteVpcPeeringConnection.+\$\.eventName\s*=\s*.?RejectVpcPeeringConnection.+\$\.eventName\s*=\s*.?AttachClassicLinkVpc.+\$\.eventName\s*=\s*.?DetachClassicLinkVpc.+\$\.eventName\s*=\s*.?DisableVpcClassicLink.+\$\.eventName\s*=\s*.?EnableVpcClassicLink.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_change
Check
):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*config.amazonaws.com.+\$\.eventName\s*=\s*StopConfigurationRecorder.+\$\.eventName\s*=\s*DeleteDeliveryChannel.+\$\.eventName\s*=\s*PutDeliveryChannel.+\$\.eventName\s*=\s*PutConfigurationRecorder"
pattern = r"\$\.eventSource\s*=\s*.?config.amazonaws.com.+\$\.eventName\s*=\s*.?StopConfigurationRecorder.+\$\.eventName\s*=\s*.?DeleteDeliveryChannel.+\$\.eventName\s*=\s*.?PutDeliveryChannel.+\$\.eventName\s*=\s*.?PutConfigurationRecorder.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_change
Check
):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateTrail.+\$\.eventName\s*=\s*UpdateTrail.+\$\.eventName\s*=\s*DeleteTrail.+\$\.eventName\s*=\s*StartLogging.+\$\.eventName\s*=\s*StopLogging"
pattern = r"\$\.eventName\s*=\s*.?CreateTrail.+\$\.eventName\s*=\s*.?UpdateTrail.+\$\.eventName\s*=\s*.?DeleteTrail.+\$\.eventName\s*=\s*.?StartLogging.+\$\.eventName\s*=\s*.?StopLogging.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_authentication_failures(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*ConsoleLogin.+\$\.errorMessage\s*=\s*Failed authentication"
pattern = r"\$\.eventName\s*=\s*.?ConsoleLogin.+\$\.errorMessage\s*=\s*.?Failed authentication.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_aws_organizations_changes(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*organizations\.amazonaws\.com.+\$\.eventName\s*=\s*AcceptHandshake.+\$\.eventName\s*=\s*AttachPolicy.+\$\.eventName\s*=\s*CancelHandshake.+\$\.eventName\s*=\s*CreateAccount.+\$\.eventName\s*=\s*CreateOrganization.+\$\.eventName\s*=\s*CreateOrganizationalUnit.+\$\.eventName\s*=\s*CreatePolicy.+\$\.eventName\s*=\s*DeclineHandshake.+\$\.eventName\s*=\s*DeleteOrganization.+\$\.eventName\s*=\s*DeleteOrganizationalUnit.+\$\.eventName\s*=\s*DeletePolicy.+\$\.eventName\s*=\s*EnableAllFeatures.+\$\.eventName\s*=\s*EnablePolicyType.+\$\.eventName\s*=\s*InviteAccountToOrganization.+\$\.eventName\s*=\s*LeaveOrganization.+\$\.eventName\s*=\s*DetachPolicy.+\$\.eventName\s*=\s*DisablePolicyType.+\$\.eventName\s*=\s*MoveAccount.+\$\.eventName\s*=\s*RemoveAccountFromOrganization.+\$\.eventName\s*=\s*UpdateOrganizationalUnit.+\$\.eventName\s*=\s*UpdatePolicy"
pattern = r"\$\.eventSource\s*=\s*.?organizations\.amazonaws\.com.+\$\.eventName\s*=\s*.?AcceptHandshake.+\$\.eventName\s*=\s*.?AttachPolicy.+\$\.eventName\s*=\s*.?CancelHandshake.+\$\.eventName\s*=\s*.?CreateAccount.+\$\.eventName\s*=\s*.?CreateOrganization.+\$\.eventName\s*=\s*.?CreateOrganizationalUnit.+\$\.eventName\s*=\s*.?CreatePolicy.+\$\.eventName\s*=\s*.?DeclineHandshake.+\$\.eventName\s*=\s*.?DeleteOrganization.+\$\.eventName\s*=\s*.?DeleteOrganizationalUnit.+\$\.eventName\s*=\s*.?DeletePolicy.+\$\.eventName\s*=\s*.?EnableAllFeatures.+\$\.eventName\s*=\s*.?EnablePolicyType.+\$\.eventName\s*=\s*.?InviteAccountToOrganization.+\$\.eventName\s*=\s*.?LeaveOrganization.+\$\.eventName\s*=\s*.?DetachPolicy.+\$\.eventName\s*=\s*.?DisablePolicyType.+\$\.eventName\s*=\s*.?MoveAccount.+\$\.eventName\s*=\s*.?RemoveAccountFromOrganization.+\$\.eventName\s*=\s*.?UpdateOrganizationalUnit.+\$\.eventName\s*=\s*.?UpdatePolicy.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*kms.amazonaws.com.+\$\.eventName\s*=\s*DisableKey.+\$\.eventName\s*=\s*ScheduleKeyDeletion"
pattern = r"\$\.eventSource\s*=\s*.?kms.amazonaws.com.+\$\.eventName\s*=\s*.?DisableKey.+\$\.eventName\s*=\s*.?ScheduleKeyDeletion.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_for_s3_bucket_policy_changes(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*s3.amazonaws.com.+\$\.eventName\s*=\s*PutBucketAcl.+\$\.eventName\s*=\s*PutBucketPolicy.+\$\.eventName\s*=\s*PutBucketCors.+\$\.eventName\s*=\s*PutBucketLifecycle.+\$\.eventName\s*=\s*PutBucketReplication.+\$\.eventName\s*=\s*DeleteBucketPolicy.+\$\.eventName\s*=\s*DeleteBucketCors.+\$\.eventName\s*=\s*DeleteBucketLifecycle.+\$\.eventName\s*=\s*DeleteBucketReplication"
pattern = r"\$\.eventSource\s*=\s*.?s3.amazonaws.com.+\$\.eventName\s*=\s*.?PutBucketAcl.+\$\.eventName\s*=\s*.?PutBucketPolicy.+\$\.eventName\s*=\s*.?PutBucketCors.+\$\.eventName\s*=\s*.?PutBucketLifecycle.+\$\.eventName\s*=\s*.?PutBucketReplication.+\$\.eventName\s*=\s*.?DeleteBucketPolicy.+\$\.eventName\s*=\s*.?DeleteBucketCors.+\$\.eventName\s*=\s*.?DeleteBucketLifecycle.+\$\.eventName\s*=\s*.?DeleteBucketReplication.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_policy_changes(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*DeleteGroupPolicy.+\$\.eventName\s*=\s*DeleteRolePolicy.+\$\.eventName\s*=\s*DeleteUserPolicy.+\$\.eventName\s*=\s*PutGroupPolicy.+\$\.eventName\s*=\s*PutRolePolicy.+\$\.eventName\s*=\s*PutUserPolicy.+\$\.eventName\s*=\s*CreatePolicy.+\$\.eventName\s*=\s*DeletePolicy.+\$\.eventName\s*=\s*CreatePolicyVersion.+\$\.eventName\s*=\s*DeletePolicyVersion.+\$\.eventName\s*=\s*AttachRolePolicy.+\$\.eventName\s*=\s*DetachRolePolicy.+\$\.eventName\s*=\s*AttachUserPolicy.+\$\.eventName\s*=\s*DetachUserPolicy.+\$\.eventName\s*=\s*AttachGroupPolicy.+\$\.eventName\s*=\s*DetachGroupPolicy"
pattern = r"\$\.eventName\s*=\s*.?DeleteGroupPolicy.+\$\.eventName\s*=\s*.?DeleteRolePolicy.+\$\.eventName\s*=\s*.?DeleteUserPolicy.+\$\.eventName\s*=\s*.?PutGroupPolicy.+\$\.eventName\s*=\s*.?PutRolePolicy.+\$\.eventName\s*=\s*.?PutUserPolicy.+\$\.eventName\s*=\s*.?CreatePolicy.+\$\.eventName\s*=\s*.?DeletePolicy.+\$\.eventName\s*=\s*.?CreatePolicyVersion.+\$\.eventName\s*=\s*.?DeletePolicyVersion.+\$\.eventName\s*=\s*.?AttachRolePolicy.+\$\.eventName\s*=\s*.?DetachRolePolicy.+\$\.eventName\s*=\s*.?AttachUserPolicy.+\$\.eventName\s*=\s*.?DetachUserPolicy.+\$\.eventName\s*=\s*.?AttachGroupPolicy.+\$\.eventName\s*=\s*.?DetachGroupPolicy.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_root_usage(Check):
def execute(self):
pattern = r"\$\.userIdentity\.type\s*=\s*Root.+\$\.userIdentity\.invokedBy NOT EXISTS.+\$\.eventType\s*!=\s*AwsServiceEvent"
pattern = r"\$\.userIdentity\.type\s*=\s*.?Root.+\$\.userIdentity\.invokedBy NOT EXISTS.+\$\.eventType\s*!=\s*.?AwsServiceEvent.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_security_group_changes(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*AuthorizeSecurityGroupIngress.+\$\.eventName\s*=\s*AuthorizeSecurityGroupEgress.+\$\.eventName\s*=\s*RevokeSecurityGroupIngress.+\$\.eventName\s*=\s*RevokeSecurityGroupEgress.+\$\.eventName\s*=\s*CreateSecurityGroup.+\$\.eventName\s*=\s*DeleteSecurityGroup"
pattern = r"\$\.eventName\s*=\s*.?AuthorizeSecurityGroupIngress.+\$\.eventName\s*=\s*.?AuthorizeSecurityGroupEgress.+\$\.eventName\s*=\s*.?RevokeSecurityGroupIngress.+\$\.eventName\s*=\s*.?RevokeSecurityGroupEgress.+\$\.eventName\s*=\s*.?CreateSecurityGroup.+\$\.eventName\s*=\s*.?DeleteSecurityGroup.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_sign_in_without_mfa(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*ConsoleLogin.+\$\.additionalEventData\.MFAUsed\s*!=\s*Yes"
pattern = r"\$\.eventName\s*=\s*.?ConsoleLogin.+\$\.additionalEventData\.MFAUsed\s*!=\s*.?Yes.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_unauthorized_api_calls(Check):
def execute(self):
pattern = r"\$\.errorCode\s*=\s*\*UnauthorizedOperation.+\$\.errorCode\s*=\s*AccessDenied\*"
pattern = r"\$\.errorCode\s*=\s*.?\*UnauthorizedOperation.+\$\.errorCode\s*=\s*.?AccessDenied\*.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -66,9 +66,10 @@ class Codebuild:
if len(ids["ids"]) > 0:
builds = client.batch_get_builds(ids=[ids["ids"][0]])
if "builds" in builds:
project.last_invoked_time = builds["builds"][0][
"endTime"
]
if "endTime" in builds["builds"][0]:
project.last_invoked_time = builds["builds"][0][
"endTime"
]
projects = client.batch_get_projects(names=[project.name])[
"projects"
@@ -86,7 +87,7 @@ class Codebuild:
class CodebuildProject:
name: str
region: str
last_invoked_time: datetime
last_invoked_time: Optional[datetime.datetime]
buildspec: Optional[str]
def __init__(self, name, region, last_invoked_time, buildspec):

View File

@@ -1,6 +1,8 @@
import threading
from dataclasses import dataclass
from botocore.client import ClientError
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -239,6 +241,11 @@ class EC2:
)["UserData"]
if "Value" in user_data:
instance.user_data = user_data["Value"]
except ClientError as error:
if error.response["Error"]["Code"] == "InvalidInstanceID.NotFound":
logger.warning(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -20,13 +20,13 @@ class ecs_task_definitions_no_environment_secrets(Check):
report.status = "PASS"
report.status_extended = f"No secrets found in variables of ECS task definition {task_definition.name} with revision {task_definition.revision}"
if task_definition.environment_variables:
dump_env_vars = {}
for env_var in task_definition.environment_variables:
dump_env_vars = {}
dump_env_vars.update({env_var.name: env_var.value})
temp_env_data_file = tempfile.NamedTemporaryFile(delete=False)
env_data = dumps(dump_env_vars)
env_data = dumps(dump_env_vars, indent=2)
temp_env_data_file.write(bytes(env_data, encoding="raw_unicode_escape"))
temp_env_data_file.close()
@@ -34,9 +34,16 @@ class ecs_task_definitions_no_environment_secrets(Check):
with default_settings():
secrets.scan_file(temp_env_data_file.name)
if secrets.json():
detect_secrets_output = secrets.json()
if detect_secrets_output:
secrets_string = ", ".join(
[
f"{secret['type']} on line {secret['line_number']}"
for secret in detect_secrets_output[temp_env_data_file.name]
]
)
report.status = "FAIL"
report.status_extended = f"Potential secret found in variables of ECS task definition {task_definition.name} with revision {task_definition.revision}"
report.status_extended = f"Potential secret found in variables of ECS task definition {task_definition.name} with revision {task_definition.revision} -> {secrets_string}"
os.remove(temp_env_data_file.name)

View File

@@ -97,6 +97,7 @@ class EMR:
slave_node_security_group = cluster_info["Cluster"][
"Ec2InstanceAttributes"
]["EmrManagedSlaveSecurityGroup"]
slave_node_additional_security_groups = []
if (
"AdditionalSlaveSecurityGroups"
in cluster_info["Cluster"]["Ec2InstanceAttributes"]
@@ -110,16 +111,19 @@ class EMR:
)
# Save MasterPublicDnsName
master_public_dns_name = cluster_info["Cluster"][
master_public_dns_name = cluster_info["Cluster"].get(
"MasterPublicDnsName"
]
)
self.clusters[
cluster.id
].master_public_dns_name = master_public_dns_name
# Set cluster Public/Private
# Public EMR cluster have their DNS ending with .amazonaws.com
# while private ones have format of ip-xxx-xx-xx.us-east-1.compute.internal.
if ".amazonaws.com" in master_public_dns_name:
if (
master_public_dns_name
and ".amazonaws.com" in master_public_dns_name
):
self.clusters[cluster.id].public = True
except Exception as error:

View File

@@ -9,9 +9,7 @@ maximum_expiration_days = 30
class iam_disable_30_days_credentials(Check):
def execute(self) -> Check_Report_AWS:
findings = []
response = iam_client.users
for user in response:
for user in iam_client.users:
report = Check_Report_AWS(self.metadata())
report.resource_id = user.name
report.resource_arn = user.arn
@@ -25,10 +23,10 @@ class iam_disable_30_days_credentials(Check):
)
if time_since_insertion.days > maximum_expiration_days:
report.status = "FAIL"
report.status_extended = f"User {user.name} has not logged in to the console in the past 30 days."
report.status_extended = f"User {user.name} has not logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = f"User {user.name} has logged in to the console in the past 30 days."
report.status_extended = f"User {user.name} has logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = (
@@ -38,4 +36,52 @@ class iam_disable_30_days_credentials(Check):
# Append report
findings.append(report)
for user in iam_client.credential_report:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_arn = user["arn"]
if (
user["access_key_1_active"] != "true"
and user["access_key_2_active"] != "true"
):
report.status = "PASS"
report.status_extended = (
f"User {user['user']} does not have access keys."
)
else:
old_access_keys = False
if user["access_key_1_active"] == "true":
if user["access_key_1_last_used_date"] != "N/A":
access_key_1_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_1_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_1_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 1 in the last {maximum_expiration_days} days ({access_key_1_last_used_date.days} days)."
if user["access_key_2_active"] == "true":
if user["access_key_2_last_used_date"] != "N/A":
access_key_2_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_2_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_2_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 2 in the last {maximum_expiration_days} days ({access_key_2_last_used_date.days} days)."
if not old_access_keys:
report.status = "PASS"
report.status_extended = f"User {user['user']} does not have unused access keys for {maximum_expiration_days} days."
findings.append(report)
return findings

View File

@@ -9,9 +9,7 @@ maximum_expiration_days = 45
class iam_disable_45_days_credentials(Check):
def execute(self) -> Check_Report_AWS:
findings = []
response = iam_client.users
for user in response:
for user in iam_client.users:
report = Check_Report_AWS(self.metadata())
report.resource_id = user.name
report.resource_arn = user.arn
@@ -38,4 +36,52 @@ class iam_disable_45_days_credentials(Check):
# Append report
findings.append(report)
for user in iam_client.credential_report:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_arn = user["arn"]
if (
user["access_key_1_active"] != "true"
and user["access_key_2_active"] != "true"
):
report.status = "PASS"
report.status_extended = (
f"User {user['user']} does not have access keys."
)
else:
old_access_keys = False
if user["access_key_1_active"] == "true":
if user["access_key_1_last_used_date"] != "N/A":
access_key_1_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_1_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_1_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 1 in the last {maximum_expiration_days} days ({access_key_1_last_used_date.days} days)."
if user["access_key_2_active"] == "true":
if user["access_key_2_last_used_date"] != "N/A":
access_key_2_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_2_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_2_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 2 in the last {maximum_expiration_days} days ({access_key_2_last_used_date.days} days)."
if not old_access_keys:
report.status = "PASS"
report.status_extended = f"User {user['user']} does not have unused access keys for {maximum_expiration_days} days."
findings.append(report)
return findings

View File

@@ -9,13 +9,11 @@ maximum_expiration_days = 90
class iam_disable_90_days_credentials(Check):
def execute(self) -> Check_Report_AWS:
findings = []
response = iam_client.users
for user in response:
for user in iam_client.users:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user.name
report.resource_arn = user.arn
report.region = iam_client.region
if user.password_last_used:
time_since_insertion = (
datetime.datetime.now()
@@ -25,17 +23,65 @@ class iam_disable_90_days_credentials(Check):
)
if time_since_insertion.days > maximum_expiration_days:
report.status = "FAIL"
report.status_extended = f"User {user.name} has not logged in to the console in the past 90 days."
report.status_extended = f"User {user.name} has not logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = f"User {user.name} has logged in to the console in the past 90 days."
report.status_extended = f"User {user.name} has logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = (
f"User {user.name} does not have a console password or is unused."
)
# Append report
findings.append(report)
for user in iam_client.credential_report:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_arn = user["arn"]
if (
user["access_key_1_active"] != "true"
and user["access_key_2_active"] != "true"
):
report.status = "PASS"
report.status_extended = (
f"User {user['user']} does not have access keys."
)
else:
old_access_keys = False
if user["access_key_1_active"] == "true":
if user["access_key_1_last_used_date"] != "N/A":
access_key_1_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_1_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_1_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 1 in the last {maximum_expiration_days} days ({access_key_1_last_used_date.days} days)."
if user["access_key_2_active"] == "true":
if user["access_key_2_last_used_date"] != "N/A":
access_key_2_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_2_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_2_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 2 in the last {maximum_expiration_days} days ({access_key_2_last_used_date.days} days)."
if not old_access_keys:
report.status = "PASS"
report.status_extended = f"User {user['user']} does not have unused access keys for {maximum_expiration_days} days."
findings.append(report)
return findings

View File

@@ -21,6 +21,7 @@ class iam_no_custom_policy_permissive_role_assumption(Check):
if (
statement["Effect"] == "Allow"
and "Action" in statement
and "Resource" in statement
and "*" in statement["Resource"]
):
if type(statement["Action"]) == list:

View File

@@ -12,7 +12,7 @@ class iam_password_policy_expires_passwords_within_90_days_or_less(Check):
if iam_client.password_policy:
# Check if password policy expiration exists
if iam_client.password_policy.max_age:
if iam_client.password_policy.max_age < 90:
if iam_client.password_policy.max_age <= 90:
report.status = "PASS"
report.status_extended = f"Password expiration is set lower than 90 days ({iam_client.password_policy.max_age} days)."
else:

View File

@@ -26,7 +26,10 @@ class iam_rotate_access_key_90_days(Check):
)
else:
old_access_keys = False
if user["access_key_1_last_rotated"] != "N/A":
if (
user["access_key_1_last_rotated"] != "N/A"
and user["access_key_1_active"] == "true"
):
access_key_1_last_rotated = (
datetime.datetime.now()
- datetime.datetime.strptime(
@@ -38,7 +41,10 @@ class iam_rotate_access_key_90_days(Check):
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not rotated access key 1 in over 90 days ({access_key_1_last_rotated.days} days)."
if user["access_key_2_last_rotated"] != "N/A":
if (
user["access_key_2_last_rotated"] != "N/A"
and user["access_key_2_active"] == "true"
):
access_key_2_last_rotated = (
datetime.datetime.now()
- datetime.datetime.strptime(

View File

@@ -54,7 +54,7 @@ class RDS:
self.db_instances.append(
DBInstance(
id=instance["DBInstanceIdentifier"],
endpoint=instance["Endpoint"]["Address"],
endpoint=instance.get("Endpoint"),
engine=instance["Engine"],
status=instance["DBInstanceStatus"],
public=instance["PubliclyAccessible"],
@@ -171,7 +171,7 @@ class RDS:
class DBInstance(BaseModel):
id: str
endpoint: str
endpoint: Optional[dict]
engine: str
status: str
public: bool

View File

@@ -2,6 +2,8 @@ import json
import threading
from dataclasses import dataclass
from botocore.client import ClientError
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -44,35 +46,38 @@ class S3:
try:
list_buckets = self.client.list_buckets()
for bucket in list_buckets["Buckets"]:
try:
bucket_region = self.client.get_bucket_location(
Bucket=bucket["Name"]
)["LocationConstraint"]
if bucket_region == "EU": # If EU, bucket_region is eu-west-1
bucket_region = "eu-west-1"
if not bucket_region: # If None, bucket_region is us-east-1
bucket_region = "us-east-1"
# Arn
arn = f"arn:{self.audited_partition}:s3:::{bucket['Name']}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
# Check if there are filter regions
if audit_info.audited_regions:
if bucket_region in audit_info.audited_regions:
buckets.append(
Bucket(bucket["Name"], arn, bucket_region)
)
else:
bucket_region = self.client.get_bucket_location(Bucket=bucket["Name"])[
"LocationConstraint"
]
if bucket_region == "EU": # If EU, bucket_region is eu-west-1
bucket_region = "eu-west-1"
if not bucket_region: # If None, bucket_region is us-east-1
bucket_region = "us-east-1"
# Arn
arn = f"arn:{self.audited_partition}:s3:::{bucket['Name']}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
# Check if there are filter regions
if audit_info.audited_regions:
if bucket_region in audit_info.audited_regions:
buckets.append(Bucket(bucket["Name"], arn, bucket_region))
except Exception as error:
logger.error(
f"{bucket} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
buckets.append(Bucket(bucket["Name"], arn, bucket_region))
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchBucket":
logger.warning(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if bucket:
logger.error(
f"{bucket} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return buckets
def __get_bucket_versioning__(self, bucket):
@@ -89,9 +94,14 @@ class S3:
if "Enabled" == bucket_versioning["MFADelete"]:
bucket.mfa_delete = True
except Exception as error:
logger.error(
f"{bucket.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if bucket.region:
logger.error(
f"{bucket.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_bucket_encryption__(self, bucket):
logger.info("S3 - Get buckets encryption...")
@@ -107,10 +117,14 @@ class S3:
except Exception as error:
if "ServerSideEncryptionConfigurationNotFoundError" in str(error):
bucket.encryption = None
else:
elif regional_client:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_bucket_logging__(self, bucket):
logger.info("S3 - Get buckets logging...")
@@ -123,9 +137,14 @@ class S3:
"TargetBucket"
]
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if regional_client:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_public_access_block__(self, bucket):
logger.info("S3 - Get buckets public access block...")
@@ -148,9 +167,14 @@ class S3:
}
)
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if regional_client:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_bucket_acl__(self, bucket):
logger.info("S3 - Get buckets acl...")
@@ -171,9 +195,14 @@ class S3:
grantees.append(grantee)
bucket.acl_grantees = grantees
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if regional_client:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_bucket_policy__(self, bucket):
logger.info("S3 - Get buckets policy...")
@@ -186,9 +215,14 @@ class S3:
if "NoSuchBucketPolicy" in str(error):
bucket.policy = {}
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if regional_client:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_bucket_ownership_controls__(self, bucket):
logger.info("S3 - Get buckets ownership controls...")
@@ -201,9 +235,14 @@ class S3:
if "OwnershipControlsNotFoundError" in str(error):
bucket.ownership = None
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if regional_client:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
################## S3Control

View File

@@ -24,18 +24,26 @@ class ssm_document_secrets(Check):
if document.content:
temp_env_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_env_data_file.write(
bytes(json.dumps(document.content), encoding="raw_unicode_escape")
bytes(
json.dumps(document.content, indent=2),
encoding="raw_unicode_escape",
)
)
temp_env_data_file.close()
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(temp_env_data_file.name)
if secrets.json():
report.status = "FAIL"
report.status_extended = (
f"Potential secret found in SSM Document {document.name}"
detect_secrets_output = secrets.json()
if detect_secrets_output:
secrets_string = ", ".join(
[
f"{secret['type']} on line {secret['line_number']}"
for secret in detect_secrets_output[temp_env_data_file.name]
]
)
report.status = "FAIL"
report.status_extended = f"Potential secret found in SSM Document {document.name} -> {secrets_string}"
os.remove(temp_env_data_file.name)

View File

@@ -9,7 +9,7 @@
"SubServiceName": "service_endpoint",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"ResourceType": "AwsEc2Vpc",
"ResourceType": "AwsEc2VpcEndpointService",
"Description": "Find trust boundaries in VPC endpoint services allowlisted principals.",
"Risk": "Account VPC could be linked to other accounts.",
"RelatedUrl": "",

View File

@@ -9,7 +9,7 @@
"SubServiceName": "route_table",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"ResourceType": "AwsEc2Vpc",
"ResourceType": "AwsEc2VpcPeeringConnection",
"Description": "Ensure routing tables for VPC peering are least access.",
"Risk": "Being highly selective in peering routing tables is a very effective way of minimizing the impact of breach as resources outside of these routes are inaccessible to the peered VPC.",
"RelatedUrl": "",

View File

@@ -1,6 +1,8 @@
import json
import threading
from dataclasses import dataclass
from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
@@ -50,10 +52,10 @@ class VPC:
):
self.vpcs.append(
VPCs(
vpc["VpcId"],
vpc["IsDefault"],
vpc["CidrBlock"],
regional_client.region,
id=vpc["VpcId"],
default=vpc["IsDefault"],
cidr_block=vpc["CidrBlock"],
region=regional_client.region,
)
)
except Exception as error:
@@ -74,14 +76,17 @@ class VPC:
conn["VpcPeeringConnectionId"], self.audit_resources
)
):
conn["AccepterVpcInfo"]["CidrBlock"] = None
self.vpc_peering_connections.append(
VpcPeeringConnection(
conn["VpcPeeringConnectionId"],
conn["AccepterVpcInfo"]["VpcId"],
conn["AccepterVpcInfo"]["CidrBlock"],
conn["RequesterVpcInfo"]["VpcId"],
conn["RequesterVpcInfo"]["CidrBlock"],
regional_client.region,
id=conn["VpcPeeringConnectionId"],
accepter_vpc=conn["AccepterVpcInfo"]["VpcId"],
accepter_cidr=conn["AccepterVpcInfo"].get("CidrBlock"),
requester_vpc=conn["RequesterVpcInfo"]["VpcId"],
requester_cidr=conn["RequesterVpcInfo"].get(
"CidrBlock"
),
region=regional_client.region,
)
)
except Exception as error:
@@ -113,8 +118,8 @@ class VPC:
destination_cidrs.append(route["DestinationCidrBlock"])
conn.route_tables.append(
Route(
route_table["RouteTableId"],
destination_cidrs,
id=route_table["RouteTableId"],
destination_cidrs=destination_cidrs,
)
)
except Exception as error:
@@ -160,12 +165,12 @@ class VPC:
endpoint_policy = json.loads(endpoint["PolicyDocument"])
self.vpc_endpoints.append(
VpcEndpoint(
endpoint["VpcEndpointId"],
endpoint["VpcId"],
endpoint["State"],
endpoint_policy,
endpoint["OwnerId"],
regional_client.region,
id=endpoint["VpcEndpointId"],
vpc_id=endpoint["VpcId"],
state=endpoint["State"],
policy_document=endpoint_policy,
owner_id=endpoint["OwnerId"],
region=regional_client.region,
)
)
except Exception as error:
@@ -189,10 +194,10 @@ class VPC:
):
self.vpc_endpoint_services.append(
VpcEndpointService(
endpoint["ServiceId"],
endpoint["ServiceName"],
endpoint["Owner"],
regional_client.region,
id=endpoint["ServiceId"],
service=endpoint["ServiceName"],
owner_id=endpoint["Owner"],
region=regional_client.region,
)
)
except Exception as error:
@@ -217,114 +222,41 @@ class VPC:
logger.error(f"{error.__class__.__name__}: {error}")
@dataclass
class VPCs:
class VPCs(BaseModel):
id: str
default: bool
cidr_block: str
flow_log: bool
flow_log: bool = False
region: str
def __init__(
self,
id,
default,
cidr_block,
region,
):
self.id = id
self.default = default
self.cidr_block = cidr_block
self.flow_log = False
self.region = region
@dataclass
class Route:
class Route(BaseModel):
id: str
destination_cidrs: list[str]
def __init__(
self,
id,
destination_cidrs,
):
self.id = id
self.destination_cidrs = destination_cidrs
@dataclass
class VpcPeeringConnection:
class VpcPeeringConnection(BaseModel):
id: str
accepter_vpc: str
accepter_cidr: str
accepter_cidr: Optional[str]
requester_vpc: str
requester_cidr: str
route_tables: list[Route]
requester_cidr: Optional[str]
route_tables: list[Route] = []
region: str
def __init__(
self,
id,
accepter_vpc,
accepter_cidr,
requester_vpc,
requester_cidr,
region,
):
self.id = id
self.accepter_vpc = accepter_vpc
self.accepter_cidr = accepter_cidr
self.requester_vpc = requester_vpc
self.requester_cidr = requester_cidr
self.route_tables = []
self.region = region
@dataclass
class VpcEndpoint:
class VpcEndpoint(BaseModel):
id: str
vpc_id: str
state: str
policy_document: dict
owner_id: list[Route]
owner_id: str
region: str
def __init__(
self,
id,
vpc_id,
state,
policy_document,
owner_id,
region,
):
self.id = id
self.vpc_id = vpc_id
self.state = state
self.policy_document = policy_document
self.owner_id = owner_id
self.route_tables = []
self.region = region
@dataclass
class VpcEndpointService:
class VpcEndpointService(BaseModel):
id: str
service: str
owner_id: str
allowed_principals: list
allowed_principals: list = []
region: str
def __init__(
self,
id,
service,
owner_id,
region,
):
self.id = id
self.service = service
self.owner_id = owner_id
self.allowed_principals = []
self.region = region

View File

@@ -1,66 +1,86 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[project]
name = "prowler-cloud"
# https://peps.python.org/pep-0440/
version = "3.2.0"
authors = [{ name = "Toni de la Fuente", email = "toni@blyx.com" }]
maintainers = [
{ name = "Sergio Garcia", email = "sergio@verica.io" },
{ name = "Nacho Rivera", email = "nacho@verica.io" },
{ name = "Pepe Fagoaga", email = "pepe@verica.io" },
]
description = "Prowler is an Open Source security tool to perform Cloud Security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains more than 240 controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks."
readme = "README.md"
requires-python = ">=3.9"
license = { text = "Apache-2.0" }
# https://peps.python.org/pep-0621/
[tool.poetry]
authors = ["Toni de la Fuente <toni@blyx.com>"]
classifiers = [
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"License :: OSI Approved :: Apache Software License",
"License :: OSI Approved :: Apache Software License"
]
# https://peps.python.org/pep-0631/
dependencies = [
"colorama ~=0.4.4",
"boto3 ~=1.26.17",
"arnparse ~=0.0.2",
"botocore ~=1.29.18",
"pydantic ~=1.9.1",
"schema ~=0.7.5",
"shodan ~=1.28.0",
"detect-secrets ~=1.4.0",
"alive-progress ~=2.4.1",
"tabulate ~=0.9.0",
"azure-identity ~=1.12.0",
"azure-storage-blob ~=12.14.1",
"msgraph-core ~=0.2.2",
"azure-mgmt-subscription ~=3.1.1",
"azure-mgmt-authorization ~=3.0.0",
"azure-mgmt-security ~=3.0.0",
"azure-mgmt-storage ~=21.0.0",
description = "Prowler is an Open Source security tool to perform Cloud Security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains more than 240 controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks."
license = "Apache-2.0"
maintainers = [
"Sergio Garcia <sergio@verica.io>",
"Nacho Rivera <nacho@verica.io>",
"Pepe Fagoaga <pepe@verica.io>"
]
name = "prowler"
packages = [
{include = "prowler"}
]
readme = "README.md"
version = "3.2.2"
[project.urls]
"Homepage" = "https://github.com/prowler-cloud/prowler"
"Documentation" = "https://docs.prowler.cloud"
"Issue tracker" = "https://github.com/prowler-cloud/prowler/issues"
"Changelog" = "https://github.com/prowler-cloud/prowler/releases"
[tool.poetry.dependencies]
alive-progress = "2.4.1"
arnparse = "0.0.2"
azure-identity = "1.12.0"
azure-mgmt-authorization = "3.0.0"
azure-mgmt-security = "3.0.0"
azure-mgmt-storage = "21.0.0"
azure-mgmt-subscription = "3.1.1"
azure-storage-blob = "12.14.1"
boto3 = "1.26.74"
botocore = "1.29.74"
colorama = "0.4.5"
detect-secrets = "1.4.0"
mkdocs = {version = "1.4.2", optional = true}
mkdocs-material = {version = "8.2.1", optional = true}
msgraph-core = "0.2.2"
pydantic = "1.10.5"
python = "^3.9"
schema = "0.7.5"
shodan = "1.28.0"
tabulate = "0.9.0"
# https://setuptools.pypa.io/en/latest/userguide/package_discovery.html
[tool.setuptools]
include-package-data = true
package-dir = { "prowler" = "prowler" }
[tool.poetry.extras]
docs = ["mkdocs", "mkdocs-material"]
[tool.setuptools.package-data]
# We need the above pattern to include our data files
"*" = ["*.yaml", "*.json"]
[tool.poetry.group.dev.dependencies]
bandit = "1.7.4"
black = "22.10.0"
coverage = "7.1.0"
docker = "6.0.0"
flake8 = "5.0.4"
freezegun = "1.2.1"
moto = "4.1.2"
openapi-spec-validator = "0.5.5"
pylint = "2.16.2"
pytest = "7.2.1"
pytest-xdist = "3.2.0"
safety = "2.3.5"
sure = "2.0.1"
vulture = "2.7"
[project.scripts]
[tool.poetry.scripts]
prowler = "prowler.__main__:prowler"
[tool.poetry.urls]
"Changelog" = "https://github.com/prowler-cloud/prowler/releases"
"Documentation" = "https://docs.prowler.cloud"
"Homepage" = "https://github.com/prowler-cloud/prowler"
"Issue tracker" = "https://github.com/prowler-cloud/prowler/issues"
[tool.poetry-version-plugin]
source = "init"
[tool.poetry_bumpversion.file."prowler/config/config.py"]
replace = 'prowler_version = "{new_version}"'
search = 'prowler_version = "{current_version}"'
[tool.pytest.ini_options]
pythonpath = [
"."

View File

@@ -1,3 +0,0 @@
# Documentation static site generator & deployment tool
mkdocs>=1.3.0
mkdocs-material>=8.2.1

View File

@@ -9,6 +9,7 @@ from prowler.lib.check.check import (
exclude_checks_to_run,
exclude_services_to_run,
get_checks_from_input_arn,
get_regions_from_audit_resources,
list_modules,
list_services,
parse_checks_from_file,
@@ -288,14 +289,27 @@ class Test_Check:
def test_get_checks_from_input_arn(self):
audit_resources = ["arn:aws:lambda:us-east-1:123456789:function:test-lambda"]
provider = "aws"
expected_checks = {
"awslambda_function_url_cors_policy",
expected_checks = [
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
"awslambda_function_no_secrets_in_code",
}
"awslambda_function_url_cors_policy",
]
recovered_checks = get_checks_from_input_arn(audit_resources, provider)
assert recovered_checks == expected_checks
def test_get_regions_from_audit_resources(self):
audit_resources = [
"arn:aws:lambda:us-east-1:123456789:function:test-lambda",
"arn:aws:iam::106908755756:policy/test",
"arn:aws:ec2:eu-west-1:106908755756:security-group/sg-test",
]
expected_regions = [
"us-east-1",
"eu-west-1",
]
recovered_regions = get_regions_from_audit_resources(audit_resources)
assert recovered_regions == expected_regions
# def test_parse_checks_from_compliance_framework_two(self):
# test_case = {
# "input": {"compliance_frameworks": ["cis_v1.4_aws", "ens_v3_aws"]},

View File

@@ -738,6 +738,12 @@ class Test_Parser:
parsed = self.parser.parse(command)
assert parsed.security_hub
def test_aws_parser_skip_sh_update(self):
argument = "--skip-sh-update"
command = [prowler_command, argument]
parsed = self.parser.parse(command)
assert parsed.skip_sh_update
def test_aws_parser_quick_inventory_short(self):
argument = "-i"
command = [prowler_command, argument]

View File

@@ -291,7 +291,7 @@ class Test_Outputs:
) = expected.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
expected.Severity = Severity(Label=finding.check_metadata.Severity.upper())
expected.Title = finding.check_metadata.CheckTitle
expected.Description = finding.check_metadata.Description
expected.Description = finding.status_extended
expected.Resources = [
Resource(
Id="test-arn",

View File

@@ -76,7 +76,7 @@ class Test_awslambda_function_no_secrets_in_code:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Potential secret found in Lambda function {function_name} code"
== f"Potential secret found in Lambda function {function_name} code -> lambda_function.py: Secret Keyword on line 3"
)
def test_function_code_without_secrets(self):

View File

@@ -102,7 +102,7 @@ class Test_awslambda_function_no_secrets_in_variables:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Potential secret found in Lambda function {function_name} variables"
== f"Potential secret found in Lambda function {function_name} variables -> Secret Keyword in variable db_password"
)
def test_function_no_secrets_in_variables(self):

View File

@@ -115,3 +115,51 @@ class Test_cloudtrail_logs_s3_bucket_access_logging_enabled:
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
@mock_cloudtrail
@mock_s3
def test_bucket_cross_account(self):
cloudtrail_client_us_east_1 = client("cloudtrail", region_name="us-east-1")
s3_client_us_east_1 = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client_us_east_1.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client_us_east_1.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
from prowler.providers.aws.services.s3.s3_service import S3
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.s3_client",
new=S3(current_audit_info),
) as s3_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled import (
cloudtrail_logs_s3_bucket_access_logging_enabled,
)
# Empty s3 buckets to simulate the bucket is in another account
s3_client.buckets = []
check = cloudtrail_logs_s3_bucket_access_logging_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert search(
"in another account out of Prowler's permissions scope, please check it manually",
result[0].status_extended,
)
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]

View File

@@ -148,6 +148,7 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
from prowler.providers.aws.services.s3.s3_service import S3
current_audit_info.audited_partition = "aws"
@@ -155,19 +156,71 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible:
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
)
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.s3_client",
new=S3(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
)
check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
result = check.execute()
check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
assert search(
result[0].status_extended,
f"S3 Bucket {bucket_name_us} from single region trail {trail_name_us} is not publicly accessible",
)
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
assert search(
result[0].status_extended,
f"S3 Bucket {bucket_name_us} from single region trail {trail_name_us} is not publicly accessible",
)
@mock_cloudtrail
@mock_s3
def test_trail_bucket_cross_account(self):
cloudtrail_client = client("cloudtrail", region_name="us-east-1")
s3_client = client("s3", region_name="us-east-1")
trail_name_us = "trail_test_us"
bucket_name_us = "bucket_test_us"
s3_client.create_bucket(Bucket=bucket_name_us)
trail_us = cloudtrail_client.create_trail(
Name=trail_name_us, S3BucketName=bucket_name_us, IsMultiRegionTrail=False
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
Cloudtrail,
)
from prowler.providers.aws.services.s3.s3_service import S3
current_audit_info.audited_partition = "aws"
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
with mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.s3_client",
new=S3(current_audit_info),
) as s3_client:
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_is_not_publicly_accessible.cloudtrail_logs_s3_bucket_is_not_publicly_accessible import (
cloudtrail_logs_s3_bucket_is_not_publicly_accessible,
)
# Empty s3 buckets to simulate the bucket is in another account
s3_client.buckets = []
check = cloudtrail_logs_s3_bucket_is_not_publicly_accessible()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_id == trail_name_us
assert result[0].resource_arn == trail_us["TrailARN"]
assert search(
"is a cross-account bucket in another account out of Prowler's permissions scope",
result[0].status_extended,
)

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern='{ ($.eventName = "CreateNetworkAcl") || ($.eventName = "CreateNetworkAclEntry") || ($.eventName = "DeleteNetworkAcl") || ($.eventName = "DeleteNetworkAclEntry") || ($.eventName = "ReplaceNetworkAclEntry") || ($.eventName = "ReplaceNetworkAclAssociation") }',
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_acls_alarm_configured.cloudwatch_changes_to_network_acls_alarm_configured import (
cloudwatch_changes_to_network_acls_alarm_configured,
)
check = cloudwatch_changes_to_network_acls_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
logs_client = client("logs", region_name=AWS_REGION)
s3_client = client("s3", region_name=AWS_REGION)
s3_client.create_bucket(Bucket="test")
logs_client.create_log_group(logGroupName="/log-group/test")
cloudtrail_client.create_trail(
Name="test_trail",
S3BucketName="test",
CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
)
logs_client.put_metric_filter(
logGroupName="/log-group/test",
filterName="test-filter",
filterPattern='{($.eventName = "CreateCustomerGateway") || ($.eventName = "DeleteCustomerGateway") || ($.eventName = "AttachInternetGateway") || ($.eventName = "CreateInternetGateway") || ($.eventName = "DeleteInternetGateway") || ($.eventName = "DetachInternetGateway") }',
metricTransformations=[
{
"metricName": "my-metric",
"metricNamespace": "my-namespace",
"metricValue": "$.value",
}
],
)
cloudwatch_client.put_metric_alarm(
AlarmName="test-alarm",
MetricName="my-metric",
Namespace="my-namespace",
Period=10,
EvaluationPeriods=5,
Statistic="Average",
Threshold=2,
ComparisonOperator="GreaterThanThreshold",
ActionsEnabled=True,
)
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
Logs,
)
current_audit_info.audited_partition = "aws"
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
Cloudtrail,
)
with mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.logs_client",
new=Logs(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_client",
new=CloudWatch(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured.cloudtrail_client",
new=Cloudtrail(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_gateways_alarm_configured.cloudwatch_changes_to_network_gateways_alarm_configured import (
cloudwatch_changes_to_network_gateways_alarm_configured,
)
check = cloudwatch_changes_to_network_gateways_alarm_configured()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventName = "CreateRoute") || ($.eventName = "CreateRouteTable") || ($.eventName = "ReplaceRoute") || ($.eventName = "ReplaceRouteTableAssociation")|| ($.eventName = "DeleteRouteTable") || ($.eventName = "DeleteRoute") || ($.eventName = "DisassociateRouteTable") }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_network_route_tables_alarm_configured.cloudwatch_changes_to_network_route_tables_alarm_configured import (
            cloudwatch_changes_to_network_route_tables_alarm_configured,
        )

        findings = cloudwatch_changes_to_network_route_tables_alarm_configured().execute()

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{ ($.eventName = "CreateVpc") || ($.eventName = "DeleteVpc") || ($.eventName = "ModifyVpcAttribute") || ($.eventName = "AcceptVpcPeeringConnection") || ($.eventName = "CreateVpcPeeringConnection") || ($.eventName = "DeleteVpcPeeringConnection") || ($.eventName = "RejectVpcPeeringConnection") || ($.eventName = "AttachClassicLinkVpc") || ($.eventName = "DetachClassicLinkVpc") || ($.eventName = "DisableVpcClassicLink") || ($.eventName = "EnableVpcClassicLink") }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_changes_to_vpcs_alarm_configured.cloudwatch_changes_to_vpcs_alarm_configured import (
            cloudwatch_changes_to_vpcs_alarm_configured,
        )

        findings = cloudwatch_changes_to_vpcs_alarm_configured().execute()

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -300,3 +300,82 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventSource = "config.amazonaws.com") && (($.eventName="StopConfigurationRecorder")||($.eventName="DeleteDeliveryChannel")|| ($.eventName="PutDeliveryChannel")||($.eventName="PutConfigurationRecorder"))}',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled import (
            cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled,
        )

        findings = (
            cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled().execute()
        )

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -300,3 +300,82 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventName = "CreateTrail") || ($.eventName = "UpdateTrail") || ($.eventName = "DeleteTrail") || ($.eventName = "StartLogging") || ($.eventName = "StopLogging")}',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled.cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled import (
            cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled,
        )

        findings = (
            cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled().execute()
        )

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventName = "ConsoleLogin") && ($.errorMessage = "Failed authentication")}',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_authentication_failures.cloudwatch_log_metric_filter_authentication_failures import (
            cloudwatch_log_metric_filter_authentication_failures,
        )

        findings = cloudwatch_log_metric_filter_authentication_failures().execute()

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_aws_organizations_changes:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{ ($.eventSource = "organizations.amazonaws.com") && ($.eventName = "AcceptHandshake") || ($.eventName = "AttachPolicy") || ($.eventName = "CancelHandshake") || ($.eventName = "CreateAccount") || ($.eventName = "CreateOrganization") || ($.eventName = "CreateOrganizationalUnit") || ($.eventName = "CreatePolicy") || ($.eventName = "DeclineHandshake") || ($.eventName = "DeleteOrganization") || ($.eventName = "DeleteOrganizationalUnit") || ($.eventName = "DeletePolicy") || ($.eventName = "EnableAllFeatures") || ($.eventName = "EnablePolicyType") || ($.eventName = "InviteAccountToOrganization") || ($.eventName = "LeaveOrganization") || ($.eventName = "DetachPolicy") || ($.eventName = "DisablePolicyType") || ($.eventName = "MoveAccount") || ($.eventName = "RemoveAccountFromOrganization") || ($.eventName = "UpdateOrganizationalUnit") || ($.eventName = "UpdatePolicy") }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_aws_organizations_changes.cloudwatch_log_metric_filter_aws_organizations_changes import (
            cloudwatch_log_metric_filter_aws_organizations_changes,
        )

        findings = cloudwatch_log_metric_filter_aws_organizations_changes().execute()

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -300,3 +300,82 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventSource = "kms.amazonaws.com") &&(($.eventName="DisableKey")||($.eventName="ScheduleKeyDeletion")) }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk.cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk import (
            cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk,
        )

        findings = (
            cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk().execute()
        )

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventSource = "s3.amazonaws.com") && (($.eventName = "PutBucketAcl") || ($.eventName = "PutBucketPolicy") || ($.eventName = "PutBucketCors") || ($.eventName = "PutBucketLifecycle") || ($.eventName = "PutBucketReplication") || ($.eventName = "DeleteBucketPolicy") || ($.eventName = "DeleteBucketCors") || ($.eventName = "DeleteBucketLifecycle") || ($.eventName = "DeleteBucketReplication")) }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes.cloudwatch_log_metric_filter_for_s3_bucket_policy_changes import (
            cloudwatch_log_metric_filter_for_s3_bucket_policy_changes,
        )

        findings = cloudwatch_log_metric_filter_for_s3_bucket_policy_changes().execute()

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventName="DeleteGroupPolicy")||($.eventName="DeleteRolePolicy")||($.eventName="DeleteUserPolicy")||($.eventName="PutGroupPolicy")||($.eventName="PutRolePolicy")||($.eventName="PutUserPolicy")||($.eventName="CreatePolicy")||($.eventName="DeletePolicy")||($.eventName="CreatePolicyVersion")||($.eventName="DeletePolicyVersion")||($.eventName="AttachRolePolicy")||($.eventName="DetachRolePolicy")||($.eventName="AttachUserPolicy")||($.eventName="DetachUserPolicy")||($.eventName="AttachGroupPolicy")||($.eventName="DetachGroupPolicy")}',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_policy_changes.cloudwatch_log_metric_filter_policy_changes import (
            cloudwatch_log_metric_filter_policy_changes,
        )

        findings = cloudwatch_log_metric_filter_policy_changes().execute()

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the trail's log group has a quoted metric filter plus an alarm."""
    # Arrange: bucket, log group, and a trail delivering to that log group.
    s3 = client("s3", region_name=AWS_REGION)
    s3.create_bucket(Bucket="test")
    logs = client("logs", region_name=AWS_REGION)
    logs.create_log_group(logGroupName="/log-group/test")
    client("cloudtrail", region_name=AWS_REGION).create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern embeds double quotes.
    logs.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{ $.userIdentity.type = "Root" && $.userIdentity.invokedBy NOT EXISTS && $.eventType != "AwsServiceEvent" }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm wired to the metric emitted by the filter above.
    client("cloudwatch", region_name=AWS_REGION).put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )

    # NOTE(review): keep this import/assignment sequence as-is — the prowler
    # client modules presumably read current_audit_info at import time; verify.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )

    current_audit_info.audited_partition = "aws"

    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )

    target = "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage"
    with mock.patch(
        target + ".logs_client", new=Logs(current_audit_info)
    ), mock.patch(
        target + ".cloudwatch_client", new=CloudWatch(current_audit_info)
    ), mock.patch(
        target + ".cloudtrail_client", new=Cloudtrail(current_audit_info)
    ):
        # Act: import the check lazily so it binds the patched clients.
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_root_usage.cloudwatch_log_metric_filter_root_usage import (
            cloudwatch_log_metric_filter_root_usage,
        )

        findings = cloudwatch_log_metric_filter_root_usage().execute()

        # Assert: exactly one PASS finding for the log group.
        assert len(findings) == 1
        only = findings[0]
        assert only.status == "PASS"
        assert only.status_extended == (
            "CloudWatch log group /log-group/test found with metric filter "
            "test-filter and alarms set."
        )
        assert only.resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the security-group-changes filter pattern (containing
    double quotes) has an alarm set on its metric."""
    # Build the moto-mocked AWS resources the check inspects.
    cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
    cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
    logs_client = client("logs", region_name=AWS_REGION)
    s3_client = client("s3", region_name=AWS_REGION)
    s3_client.create_bucket(Bucket="test")
    logs_client.create_log_group(logGroupName="/log-group/test")
    cloudtrail_client.create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern includes '"' characters.
    logs_client.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{($.eventName = "AuthorizeSecurityGroupIngress") || ($.eventName = "AuthorizeSecurityGroupEgress") || ($.eventName = "RevokeSecurityGroupIngress") || ($.eventName = "RevokeSecurityGroupEgress") || ($.eventName = "CreateSecurityGroup") || ($.eventName = "DeleteSecurityGroup") }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm attached to the metric emitted by the filter above.
    cloudwatch_client.put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )
    # Imports deferred so the service classes see the mocked clients.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )
    current_audit_info.audited_partition = "aws"
    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )
    with mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.logs_client",
        new=Logs(current_audit_info),
    ), mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_client",
        new=CloudWatch(current_audit_info),
    ), mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes.cloudtrail_client",
        new=Cloudtrail(current_audit_info),
    ):
        # Test Check
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_security_group_changes.cloudwatch_log_metric_filter_security_group_changes import (
            cloudwatch_log_metric_filter_security_group_changes,
        )

        check = cloudwatch_log_metric_filter_security_group_changes()
        result = check.execute()
        assert len(result) == 1
        assert result[0].status == "PASS"
        assert (
            result[0].status_extended
            == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
        )
        assert result[0].resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the sign-in-without-MFA filter pattern (containing double
    quotes) has an alarm set on its metric."""
    # Build the moto-mocked AWS resources the check inspects.
    cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
    cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
    logs_client = client("logs", region_name=AWS_REGION)
    s3_client = client("s3", region_name=AWS_REGION)
    s3_client.create_bucket(Bucket="test")
    logs_client.create_log_group(logGroupName="/log-group/test")
    cloudtrail_client.create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern includes '"' characters.
    logs_client.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{ ($.eventName = "ConsoleLogin") && ($.additionalEventData.MFAUsed != "Yes") }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm attached to the metric emitted by the filter above.
    cloudwatch_client.put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )
    # Imports deferred so the service classes see the mocked clients.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )
    current_audit_info.audited_partition = "aws"
    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )
    with mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.logs_client",
        new=Logs(current_audit_info),
    ), mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_client",
        new=CloudWatch(current_audit_info),
    ), mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudtrail_client",
        new=Cloudtrail(current_audit_info),
    ):
        # Test Check
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_sign_in_without_mfa.cloudwatch_log_metric_filter_sign_in_without_mfa import (
            cloudwatch_log_metric_filter_sign_in_without_mfa,
        )

        check = cloudwatch_log_metric_filter_sign_in_without_mfa()
        result = check.execute()
        assert len(result) == 1
        assert result[0].status == "PASS"
        assert (
            result[0].status_extended
            == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
        )
        assert result[0].resource_id == "/log-group/test"

View File

@@ -290,3 +290,80 @@ class Test_cloudwatch_log_metric_filter_unauthorized_api_calls:
== "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
)
assert result[0].resource_id == "/log-group/test"
@mock_logs
@mock_cloudtrail
@mock_cloudwatch
@mock_s3
def test_cloudwatch_trail_with_log_group_with_metric_and_alarm_with_quotes(self):
    """PASS when the unauthorized-API-calls filter pattern (containing
    double quotes) has an alarm set on its metric."""
    # Build the moto-mocked AWS resources the check inspects.
    cloudtrail_client = client("cloudtrail", region_name=AWS_REGION)
    cloudwatch_client = client("cloudwatch", region_name=AWS_REGION)
    logs_client = client("logs", region_name=AWS_REGION)
    s3_client = client("s3", region_name=AWS_REGION)
    s3_client.create_bucket(Bucket="test")
    logs_client.create_log_group(logGroupName="/log-group/test")
    cloudtrail_client.create_trail(
        Name="test_trail",
        S3BucketName="test",
        CloudWatchLogsLogGroupArn=f"arn:aws:logs:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:log-group:/log-group/test:*",
    )
    # Metric filter whose pattern includes '"' characters.
    logs_client.put_metric_filter(
        logGroupName="/log-group/test",
        filterName="test-filter",
        filterPattern='{ ($.errorCode = "*UnauthorizedOperation") || ($.errorCode = "AccessDenied*") || ($.sourceIPAddress!="delivery.logs.amazonaws.com") || ($.eventName!="HeadBucket") }',
        metricTransformations=[
            {
                "metricName": "my-metric",
                "metricNamespace": "my-namespace",
                "metricValue": "$.value",
            }
        ],
    )
    # Alarm attached to the metric emitted by the filter above.
    cloudwatch_client.put_metric_alarm(
        AlarmName="test-alarm",
        MetricName="my-metric",
        Namespace="my-namespace",
        Period=10,
        EvaluationPeriods=5,
        Statistic="Average",
        Threshold=2,
        ComparisonOperator="GreaterThanThreshold",
        ActionsEnabled=True,
    )
    # Imports deferred so the service classes see the mocked clients.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
        CloudWatch,
        Logs,
    )
    current_audit_info.audited_partition = "aws"
    from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
        Cloudtrail,
    )
    with mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.logs_client",
        new=Logs(current_audit_info),
    ), mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_client",
        new=CloudWatch(current_audit_info),
    ), mock.patch(
        "prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudtrail_client",
        new=Cloudtrail(current_audit_info),
    ):
        # Test Check
        from prowler.providers.aws.services.cloudwatch.cloudwatch_log_metric_filter_unauthorized_api_calls.cloudwatch_log_metric_filter_unauthorized_api_calls import (
            cloudwatch_log_metric_filter_unauthorized_api_calls,
        )

        check = cloudwatch_log_metric_filter_unauthorized_api_calls()
        result = check.execute()
        assert len(result) == 1
        assert result[0].status == "PASS"
        assert (
            result[0].status_extended
            == "CloudWatch log group /log-group/test found with metric filter test-filter and alarms set."
        )
        assert result[0].resource_id == "/log-group/test"

View File

@@ -102,7 +102,7 @@ class Test_ecs_task_definitions_no_environment_secrets:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Potential secret found in variables of ECS task definition {task_name} with revision {task_revision}"
== f"Potential secret found in variables of ECS task definition {task_name} with revision {task_revision} -> Secret Keyword on line 2"
)
assert result[0].resource_id == f"{task_name}:1"
assert (

View File

@@ -42,7 +42,7 @@ class Test_iam_disable_30_days_credentials_test:
@mock_iam
def test_iam_user_not_logged_30_days(self):
password_last_used = (
datetime.datetime.now() - datetime.timedelta(days=40)
datetime.datetime.now() - datetime.timedelta(days=60)
).strftime("%Y-%m-%d %H:%M:%S+00:00")
iam_client = client("iam")
user = "test-user"
@@ -90,7 +90,6 @@ class Test_iam_disable_30_days_credentials_test:
)
service_client.users[0].password_last_used = ""
# raise Exception
check = iam_disable_30_days_credentials()
result = check.execute()
@@ -101,3 +100,108 @@ class Test_iam_disable_30_days_credentials_test:
)
assert result[0].resource_id == user
assert result[0].resource_arn == arn
@mock_iam
def test_user_no_access_keys(self):
    """PASS with 'does not have access keys' for a user with no access keys.

    Bug fix: the original wrote `credential_report[0][...] == "N/A"` — a
    no-op comparison whose result was discarded — instead of the intended
    assignment, so the test never actually forced the 'no access keys'
    state in the credential report.
    """
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials import (
            iam_disable_30_days_credentials,
        )

        # Mark both access keys as never rotated (assignment, not comparison).
        service_client.credential_report[0]["access_key_1_last_rotated"] = "N/A"
        service_client.credential_report[0]["access_key_2_last_rotated"] = "N/A"
        check = iam_disable_30_days_credentials()
        result = check.execute()
        assert result[-1].status == "PASS"
        assert (
            result[-1].status_extended == f"User {user} does not have access keys."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn
@mock_iam
def test_user_access_key_1_not_used(self):
    """FAIL when access key 1 was last used 100 days ago (beyond 30 days)."""
    # Timestamp 100 days in the past, in the credential report's format.
    # NOTE(review): variable is named *rotated* but is used for the
    # last-used date below.
    credentials_last_rotated = (
        datetime.datetime.now() - datetime.timedelta(days=100)
    ).strftime("%Y-%m-%dT%H:%M:%S+00:00")
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials import (
            iam_disable_30_days_credentials,
        )

        # Force an active key 1 whose last use is 100 days old.
        service_client.credential_report[0]["access_key_1_active"] = "true"
        service_client.credential_report[0][
            "access_key_1_last_used_date"
        ] = credentials_last_rotated
        check = iam_disable_30_days_credentials()
        result = check.execute()
        assert result[-1].status == "FAIL"
        assert (
            result[-1].status_extended
            == f"User {user} has not used access key 1 in the last 30 days (100 days)."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn
@mock_iam
def test_user_access_key_2_not_used(self):
    """FAIL when access key 2 was last used 100 days ago (beyond 30 days)."""
    # Timestamp 100 days in the past, in the credential report's format.
    credentials_last_rotated = (
        datetime.datetime.now() - datetime.timedelta(days=100)
    ).strftime("%Y-%m-%dT%H:%M:%S+00:00")
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_30_days_credentials.iam_disable_30_days_credentials import (
            iam_disable_30_days_credentials,
        )

        # Force an active key 2 whose last use is 100 days old.
        service_client.credential_report[0]["access_key_2_active"] = "true"
        service_client.credential_report[0][
            "access_key_2_last_used_date"
        ] = credentials_last_rotated
        check = iam_disable_30_days_credentials()
        result = check.execute()
        assert result[-1].status == "FAIL"
        assert (
            result[-1].status_extended
            == f"User {user} has not used access key 2 in the last 30 days (100 days)."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn

View File

@@ -100,3 +100,108 @@ class Test_iam_disable_45_days_credentials_test:
)
assert result[0].resource_id == user
assert result[0].resource_arn == arn
@mock_iam
def test_user_no_access_keys(self):
    """PASS with 'does not have access keys' for a user with no access keys.

    Bug fix: the original wrote `credential_report[0][...] == "N/A"` — a
    no-op comparison whose result was discarded — instead of the intended
    assignment, so the test never actually forced the 'no access keys'
    state in the credential report.
    """
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_45_days_credentials.iam_disable_45_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_45_days_credentials.iam_disable_45_days_credentials import (
            iam_disable_45_days_credentials,
        )

        # Mark both access keys as never rotated (assignment, not comparison).
        service_client.credential_report[0]["access_key_1_last_rotated"] = "N/A"
        service_client.credential_report[0]["access_key_2_last_rotated"] = "N/A"
        check = iam_disable_45_days_credentials()
        result = check.execute()
        assert result[-1].status == "PASS"
        assert (
            result[-1].status_extended == f"User {user} does not have access keys."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn
@mock_iam
def test_user_access_key_1_not_used(self):
    """FAIL when access key 1 was last used 100 days ago (beyond 45 days)."""
    # Timestamp 100 days in the past, in the credential report's format.
    credentials_last_rotated = (
        datetime.datetime.now() - datetime.timedelta(days=100)
    ).strftime("%Y-%m-%dT%H:%M:%S+00:00")
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_45_days_credentials.iam_disable_45_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_45_days_credentials.iam_disable_45_days_credentials import (
            iam_disable_45_days_credentials,
        )

        # Force an active key 1 whose last use is 100 days old.
        service_client.credential_report[0]["access_key_1_active"] = "true"
        service_client.credential_report[0][
            "access_key_1_last_used_date"
        ] = credentials_last_rotated
        check = iam_disable_45_days_credentials()
        result = check.execute()
        assert result[-1].status == "FAIL"
        assert (
            result[-1].status_extended
            == f"User {user} has not used access key 1 in the last 45 days (100 days)."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn
@mock_iam
def test_user_access_key_2_not_used(self):
    """FAIL when access key 2 was last used 100 days ago (beyond 45 days)."""
    # Timestamp 100 days in the past, in the credential report's format.
    credentials_last_rotated = (
        datetime.datetime.now() - datetime.timedelta(days=100)
    ).strftime("%Y-%m-%dT%H:%M:%S+00:00")
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_45_days_credentials.iam_disable_45_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_45_days_credentials.iam_disable_45_days_credentials import (
            iam_disable_45_days_credentials,
        )

        # Force an active key 2 whose last use is 100 days old.
        service_client.credential_report[0]["access_key_2_active"] = "true"
        service_client.credential_report[0][
            "access_key_2_last_used_date"
        ] = credentials_last_rotated
        check = iam_disable_45_days_credentials()
        result = check.execute()
        assert result[-1].status == "FAIL"
        assert (
            result[-1].status_extended
            == f"User {user} has not used access key 2 in the last 45 days (100 days)."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn

View File

@@ -90,7 +90,6 @@ class Test_iam_disable_90_days_credentials_test:
)
service_client.users[0].password_last_used = ""
# raise Exception
check = iam_disable_90_days_credentials()
result = check.execute()
@@ -101,3 +100,108 @@ class Test_iam_disable_90_days_credentials_test:
)
assert result[0].resource_id == user
assert result[0].resource_arn == arn
@mock_iam
def test_user_no_access_keys(self):
    """PASS with 'does not have access keys' for a user with no access keys.

    Bug fix: the original wrote `credential_report[0][...] == "N/A"` — a
    no-op comparison whose result was discarded — instead of the intended
    assignment, so the test never actually forced the 'no access keys'
    state in the credential report.
    """
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_90_days_credentials.iam_disable_90_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_90_days_credentials.iam_disable_90_days_credentials import (
            iam_disable_90_days_credentials,
        )

        # Mark both access keys as never rotated (assignment, not comparison).
        service_client.credential_report[0]["access_key_1_last_rotated"] = "N/A"
        service_client.credential_report[0]["access_key_2_last_rotated"] = "N/A"
        check = iam_disable_90_days_credentials()
        result = check.execute()
        assert result[-1].status == "PASS"
        assert (
            result[-1].status_extended == f"User {user} does not have access keys."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn
@mock_iam
def test_user_access_key_1_not_used(self):
    """FAIL when access key 1 was last used 100 days ago (beyond 90 days)."""
    # Timestamp 100 days in the past, in the credential report's format.
    credentials_last_rotated = (
        datetime.datetime.now() - datetime.timedelta(days=100)
    ).strftime("%Y-%m-%dT%H:%M:%S+00:00")
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_90_days_credentials.iam_disable_90_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_90_days_credentials.iam_disable_90_days_credentials import (
            iam_disable_90_days_credentials,
        )

        # Force an active key 1 whose last use is 100 days old.
        service_client.credential_report[0]["access_key_1_active"] = "true"
        service_client.credential_report[0][
            "access_key_1_last_used_date"
        ] = credentials_last_rotated
        check = iam_disable_90_days_credentials()
        result = check.execute()
        assert result[-1].status == "FAIL"
        assert (
            result[-1].status_extended
            == f"User {user} has not used access key 1 in the last 90 days (100 days)."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn
@mock_iam
def test_user_access_key_2_not_used(self):
    """FAIL when access key 2 was last used 100 days ago (beyond 90 days)."""
    # Timestamp 100 days in the past, in the credential report's format.
    credentials_last_rotated = (
        datetime.datetime.now() - datetime.timedelta(days=100)
    ).strftime("%Y-%m-%dT%H:%M:%S+00:00")
    iam_client = client("iam")
    user = "test-user"
    arn = iam_client.create_user(UserName=user)["User"]["Arn"]
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM

    current_audit_info.audited_partition = "aws"
    with mock.patch(
        "prowler.providers.aws.services.iam.iam_disable_90_days_credentials.iam_disable_90_days_credentials.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_disable_90_days_credentials.iam_disable_90_days_credentials import (
            iam_disable_90_days_credentials,
        )

        # Force an active key 2 whose last use is 100 days old.
        service_client.credential_report[0]["access_key_2_active"] = "true"
        service_client.credential_report[0][
            "access_key_2_last_used_date"
        ] = credentials_last_rotated
        check = iam_disable_90_days_credentials()
        result = check.execute()
        assert result[-1].status == "FAIL"
        assert (
            result[-1].status_extended
            == f"User {user} has not used access key 2 in the last 90 days (100 days)."
        )
        assert result[-1].resource_id == user
        assert result[-1].resource_arn == arn

View File

@@ -72,3 +72,37 @@ class Test_iam_password_policy_expires_passwords_within_90_days_or_less:
"Password expiration is set greater than 90 days",
result[0].status_extended,
)
@mock_iam
def test_password_expiration_just_90(self):
    """PASS at the boundary: a password max age of exactly 90 days."""
    # Imports deferred so the IAM service is built against the moto mock.
    from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
    from prowler.providers.aws.services.iam.iam_service import IAM, PasswordPolicy

    with mock.patch(
        "prowler.providers.aws.services.iam.iam_password_policy_expires_passwords_within_90_days_or_less.iam_password_policy_expires_passwords_within_90_days_or_less.iam_client",
        new=IAM(current_audit_info),
    ) as service_client:
        from prowler.providers.aws.services.iam.iam_password_policy_expires_passwords_within_90_days_or_less.iam_password_policy_expires_passwords_within_90_days_or_less import (
            iam_password_policy_expires_passwords_within_90_days_or_less,
        )

        # Inject a policy whose max_age sits exactly on the 90-day limit.
        service_client.password_policy = PasswordPolicy(
            length=10,
            symbols=True,
            numbers=True,
            uppercase=True,
            lowercase=True,
            allow_change=True,
            expiration=True,
            max_age=90,
            reuse_prevention=2,
            hard_expiry=True,
        )
        check = iam_password_policy_expires_passwords_within_90_days_or_less()
        result = check.execute()
        assert result[0].status == "PASS"
        assert result[0].resource_id == "password_policy"
        assert search(
            "Password expiration is set lower than 90 days",
            result[0].status_extended,
        )

View File

@@ -59,6 +59,7 @@ class Test_iam_rotate_access_key_90_days_test:
iam_rotate_access_key_90_days,
)
service_client.credential_report[0]["access_key_1_active"] = "true"
service_client.credential_report[0][
"access_key_1_last_rotated"
] = credentials_last_rotated
@@ -95,6 +96,7 @@ class Test_iam_rotate_access_key_90_days_test:
iam_rotate_access_key_90_days,
)
service_client.credential_report[0]["access_key_2_active"] = "true"
service_client.credential_report[0][
"access_key_2_last_rotated"
] = credentials_last_rotated

View File

@@ -90,7 +90,7 @@ class Test_RDS_Service:
assert rds.db_instances[0].id == "db-master-1"
assert rds.db_instances[0].region == AWS_REGION
assert (
rds.db_instances[0].endpoint
rds.db_instances[0].endpoint["Address"]
== "db-master-1.aaaaaaaaaa.us-east-1.rds.amazonaws.com"
)
assert rds.db_instances[0].status == "available"

View File

@@ -59,7 +59,7 @@ class Test_ssm_documents_secrets:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Potential secret found in SSM Document {document_name}"
== f"Potential secret found in SSM Document {document_name} -> Secret Keyword on line 2"
)
def test_document_no_secrets(self):

View File

@@ -75,8 +75,8 @@ class Test_vpc_peering_routing_tables_with_least_privilege:
service_client.vpc_peering_connections[0].route_tables = [
Route(
main_route_table_id,
["10.12.23.44/32"],
id=main_route_table_id,
destination_cidrs=["10.12.23.44/32"],
)
]
check = vpc_peering_routing_tables_with_least_privilege()
@@ -138,8 +138,8 @@ class Test_vpc_peering_routing_tables_with_least_privilege:
service_client.vpc_peering_connections[0].route_tables = [
Route(
main_route_table_id,
["10.0.0.0/16"],
id=main_route_table_id,
destination_cidrs=["10.0.0.0/16"],
)
]
check = vpc_peering_routing_tables_with_least_privilege()

View File

@@ -163,8 +163,8 @@ class Test_VPC_Service:
vpc = VPC(audit_info)
vpc.vpc_peering_connections[0].route_tables = [
Route(
main_route_table_id,
["10.0.0.4/24"],
id=main_route_table_id,
destination_cidrs=["10.0.0.4/24"],
)
]
assert len(vpc.vpc_peering_connections[0].route_tables) == 1

View File

@@ -2,7 +2,7 @@ import toml
data = toml.load("pyproject.toml")
# Modify field
data["project"]["name"] = "prowler"
data["tool"]["poetry"]["name"] = "prowler-cloud"
# To use the dump function, you need to open the file in 'write' mode
f = open("pyproject.toml", "w")