Compare commits

..

77 Commits
3.1.4 ... 3.2.2

Author SHA1 Message Date
github-actions
bffe2a2c63 chore(release): 3.2.2 2023-02-23 12:34:11 +00:00
Sergio Garcia
849b703828 chore(resource-based scan): execute only applicable checks (#1934) 2023-02-23 13:30:21 +01:00
Sergio Garcia
4b935a40b6 fix(metadata): remove us-east-1 in remediation (#1958) 2023-02-23 13:19:10 +01:00
Sergio Garcia
5873a23ccb fix(key errors): solver EMR and IAM errrors (#1957) 2023-02-23 13:15:00 +01:00
Nacho Rivera
eae2786825 fix(cloudtrail): Handle when the CloudTrail bucket is in another account (#1956) 2023-02-23 13:04:32 +01:00
github-actions[bot]
6407386de5 chore(regions_update): Changes in regions for AWS services. (#1952)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-23 12:24:36 +01:00
Sergio Garcia
3fe950723f fix(actions): add README to docker action and filter steps for releases (#1955) 2023-02-23 12:22:41 +01:00
Sergio Garcia
52bf6acd46 chore(regions): add secret token to avoid stuck checks (#1954) 2023-02-23 12:11:54 +01:00
Sergio Garcia
9590e7d7e0 chore(poetry): make python-poetry as packaging and dependency manager (#1935)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-02-23 11:50:29 +01:00
github-actions[bot]
7a08140a2d chore(regions_update): Changes in regions for AWS services. (#1950)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-23 08:42:36 +01:00
dependabot[bot]
d1491cfbd1 build(deps): bump boto3 from 1.26.74 to 1.26.76 (#1948)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-22 08:01:13 +01:00
dependabot[bot]
695b80549d build(deps): bump botocore from 1.29.75 to 1.29.76 (#1946)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-22 07:50:39 +01:00
Sergio Garcia
11c60a637f release: 3.2.1 (#1945) 2023-02-21 17:22:02 +01:00
Sergio Garcia
844ad70bb9 fix(cloudwatch): allow " in regex patterns (#1943) 2023-02-21 16:46:23 +01:00
Sergio Garcia
5ac7cde577 chore(iam_disable_N_days_credentials): improve checks logic (#1923) 2023-02-21 15:20:33 +01:00
Sergio Garcia
ce3ef0550f chore(Security Hub): add status extended to Security Hub (#1921) 2023-02-21 15:11:43 +01:00
Sergio Garcia
813f3e7d42 fix(errors): handle errors when S3 buckets or EC2 instances are deleted (#1942) 2023-02-21 12:31:23 +01:00
Sergio Garcia
d03f97af6b fix(regions): add unique branch name (#1941) 2023-02-21 11:53:36 +01:00
github-actions[bot]
019ab0286d chore(regions_update): Changes in regions for AWS services. (#1940)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-21 11:47:03 +01:00
Fennerr
c6647b4706 chore(secrets): Improve the status_extended with more information (#1937)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-21 11:37:20 +01:00
Sergio Garcia
f913536d88 fix(services): solve errors in EMR, RDS, S3 and VPC services (#1913) 2023-02-21 11:11:39 +01:00
dependabot[bot]
640d1bd176 build(deps-dev): bump moto from 4.1.2 to 4.1.3 (#1939)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-21 07:48:08 +01:00
dependabot[bot]
66baccf528 build(deps): bump botocore from 1.29.74 to 1.29.75 (#1938)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-21 07:32:44 +01:00
Sergio Garcia
6e6dacbace chore(security hub): add --skip-sh-update (#1911) 2023-02-20 09:58:00 +01:00
dependabot[bot]
cdbb10fb26 build(deps): bump boto3 from 1.26.72 to 1.26.74 (#1933)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-20 07:56:40 +01:00
dependabot[bot]
c34ba3918c build(deps): bump botocore from 1.29.73 to 1.29.74 (#1932)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-20 07:34:20 +01:00
Fennerr
fa228c876c fix(iam_rotate_access_key_90_days): check only active access keys (#1929)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-17 12:53:28 +01:00
dependabot[bot]
2f4d0af7d7 build(deps): bump botocore from 1.29.72 to 1.29.73 (#1926)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-17 12:14:23 +01:00
github-actions[bot]
2d3e5235a9 chore(regions_update): Changes in regions for AWS services. (#1927)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-17 11:13:13 +01:00
dependabot[bot]
8e91ccaa54 build(deps): bump boto3 from 1.26.71 to 1.26.72 (#1925)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-17 10:56:19 +01:00
Fennerr
6955658b36 fix(quick_inventory): handle ApiGateway resources (#1924)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-16 18:29:23 +01:00
Fennerr
dbb44401fd fix(ecs_task_definitions_no_environment_secrets): dump_env_vars is reintialised (#1922) 2023-02-16 15:59:53 +01:00
dependabot[bot]
b42ed70c84 build(deps): bump botocore from 1.29.71 to 1.29.72 (#1919)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-16 14:21:46 +01:00
dependabot[bot]
a28276d823 build(deps): bump pydantic from 1.10.4 to 1.10.5 (#1918)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-16 13:51:37 +01:00
Pepe Fagoaga
fa4b27dd0e fix(compliance): Set Version as optional and fix list (#1899)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-16 12:47:39 +01:00
dependabot[bot]
0be44d5c49 build(deps): bump boto3 from 1.26.70 to 1.26.71 (#1920)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-16 12:38:10 +01:00
github-actions[bot]
2514596276 chore(regions_update): Changes in regions for AWS services. (#1910)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-16 11:56:10 +01:00
dependabot[bot]
7008d2a953 build(deps): bump botocore from 1.29.70 to 1.29.71 (#1909)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-15 07:39:16 +01:00
dependabot[bot]
2539fedfc4 build(deps): bump boto3 from 1.26.69 to 1.26.70 (#1908)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-15 07:12:18 +01:00
Ignacio Dominguez
b453df7591 fix(iam-credentials-expiration): IAM password policy expires passwords fix (#1903)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-02-14 13:54:58 +01:00
Pepe Fagoaga
9e5d5edcba fix(codebuild): Handle endTime in builds (#1900) 2023-02-14 11:27:53 +01:00
Nacho Rivera
2d5de6ff99 fix(cross account): cloudtrail s3 bucket logging (#1902) 2023-02-14 11:23:31 +01:00
github-actions[bot]
259e9f1c17 chore(regions_update): Changes in regions for AWS services. (#1901)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-14 10:28:04 +01:00
dependabot[bot]
daeb53009e build(deps): bump botocore from 1.29.69 to 1.29.70 (#1898)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-14 08:27:14 +01:00
dependabot[bot]
f12d271ca5 build(deps): bump boto3 from 1.26.51 to 1.26.69 (#1897)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-14 07:55:26 +01:00
dependabot[bot]
965185ca3b build(deps-dev): bump pylint from 2.16.1 to 2.16.2 (#1896) 2023-02-14 07:35:29 +01:00
Pepe Fagoaga
9c484f6a78 Release: 3.2.0 (#1894) 2023-02-13 15:42:57 +01:00
Fennerr
de18c3c722 docs: Minor changes to logging (#1893) 2023-02-13 15:31:23 +01:00
Fennerr
9be753b281 docs: Minor changes to the intro paragraph (#1892) 2023-02-13 15:20:48 +01:00
Pepe Fagoaga
d6ae122de1 docs: Boto3 configuration (#1885)
Co-authored-by: Toni de la Fuente <toni@blyx.com>
2023-02-13 15:20:33 +01:00
Pepe Fagoaga
c6b90044f2 chore(Dockerfile): Remove build files (#1886) 2023-02-13 15:19:05 +01:00
Nacho Rivera
14898b6422 fix(Azure_Audit_Info): Added audited_resources field (#1891) 2023-02-13 15:17:11 +01:00
Fennerr
26294b0759 docs: Update AWS Role Assumption (#1890) 2023-02-13 15:13:22 +01:00
Nacho Rivera
6da45b5c2b fix(list_checks): arn filtering checks after audit_info set (#1887) 2023-02-13 14:57:42 +01:00
Acknosyn
674332fddd update(logging): fix plural grammar for checks execution message (#1680)
Co-authored-by: Francesco Badraun <francesco.badraun@zxsecurity.co.nz>
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-02-13 14:33:34 +01:00
Sergio Garcia
ab8942d05a fix(service errors): solve errors in IAM, S3, Lambda, DS, Cloudfront services (#1882)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-02-13 10:35:04 +01:00
github-actions[bot]
29790b8a5c chore(regions_update): Changes in regions for AWS services. (#1884)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-13 10:01:43 +01:00
dependabot[bot]
4a4c26ffeb build(deps): bump botocore from 1.29.51 to 1.29.69 (#1883)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-02-13 09:19:01 +01:00
Sergio Garcia
25c9bc07b2 chore(compliance): add manual checks to compliance CSV (#1872)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-02-10 12:38:13 +01:00
Nacho Rivera
d22d4c4c83 fix(cloudtrail_multi_region_enabled): reformat check (#1880) 2023-02-10 12:34:53 +01:00
Sergio Garcia
d88640fd20 fix(errors): solve several services errors (AccessAnalyzer, AppStream, KMS, S3, SQS, R53, IAM, CodeArtifact and EC2) (#1879) 2023-02-10 12:26:00 +01:00
github-actions[bot]
57a2fca3a4 chore(regions_update): Changes in regions for AWS services. (#1878)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-10 11:25:00 +01:00
Sergio Garcia
f796688c84 fix(metadata): typo in appstream_fleet_session_disconnect_timeout.metadata.json (#1875) 2023-02-09 16:22:19 +01:00
alexr3y
d6bbf8b7cc update(compliance): ENS RD2022 Spanish security framework updates (#1809)
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-02-09 14:14:38 +01:00
Nacho Rivera
37ec460f64 fix(hardware mfa): changed hardware mfa description (#1873) 2023-02-09 14:06:54 +01:00
Sergio Garcia
004b9c95e4 fix(key_errors): handle Key Errors in Lambda and EMR (#1871)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-09 10:32:00 +01:00
github-actions[bot]
86e27b465a chore(regions_update): Changes in regions for AWS services. (#1870)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-09 10:17:18 +01:00
Nacho Rivera
5e9afddc3a fix(permissive role assumption): actions list handling (#1869) 2023-02-09 10:06:53 +01:00
Pepe Fagoaga
de281535b1 feat(boto3-config): Use standard retrier (#1868)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2023-02-09 09:58:47 +01:00
Pedro Martín González
9df7def14e feat(compliance): Add 17 new security compliance frameworks for AWS (#1824)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-09 07:39:57 +01:00
Sergio Garcia
5b9db9795d feat(new check): add accessanalyzer_enabled check (#1864)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-08 17:39:25 +01:00
Sergio Garcia
7d2ce7e6ab fix(action): do not trigger action when editing release (#1865) 2023-02-08 17:34:02 +01:00
Oleksandr Mykytenko
3e807af2b2 fix(checks): added validation for non-existing VPC endpoint policy (#1859)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-08 12:13:22 +01:00
Oleksandr Mykytenko
4c64dc7885 Fixed elbv2 service for GWLB resources (#1860)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-08 10:38:34 +01:00
github-actions[bot]
e7a7874b34 chore(regions_update): Changes in regions for AWS services. (#1863)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-02-08 10:36:03 +01:00
dependabot[bot]
c78a47788b build(deps): bump cryptography from 39.0.0 to 39.0.1 (#1862) 2023-02-08 08:02:47 +01:00
dependabot[bot]
922698c5d9 build(deps-dev): bump pytest-xdist from 3.1.0 to 3.2.0 (#1858) 2023-02-07 18:04:30 +01:00
214 changed files with 25329 additions and 2815 deletions

View File

@@ -10,7 +10,7 @@ on:
- "docs/**"
release:
types: [published, edited]
types: [published]
env:
AWS_REGION_STG: eu-west-1
@@ -47,9 +47,25 @@ jobs:
container-build:
# needs: dockerfile-linter
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
steps:
- name: Checkout
uses: actions/checkout@v3
- name: setup python (release)
if: github.event_name == 'release'
uses: actions/setup-python@v2
with:
python-version: 3.9 #install the python needed
- name: Install dependencies (release)
if: github.event_name == 'release'
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Update Prowler version (release)
if: github.event_name == 'release'
run: |
poetry version ${{ github.event.release.tag_name }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Build

View File

@@ -24,9 +24,9 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pipenv
pipenv install --dev
pipenv run pip list
pip install poetry
poetry install
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"v([^"]+)".*/\1/' \
@@ -34,25 +34,25 @@ jobs:
&& chmod +x /tmp/hadolint
- name: Lint with flake8
run: |
pipenv run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
- name: Checking format with black
run: |
pipenv run black --check .
poetry run black --check .
- name: Lint with pylint
run: |
pipenv run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
- name: Bandit
run: |
pipenv run bandit -q -lll -x '*_test.py,./contrib/' -r .
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
run: |
pipenv run safety check
poetry run safety check
- name: Vulture
run: |
pipenv run vulture --exclude "contrib" --min-confidence 100 .
poetry run vulture --exclude "contrib" --min-confidence 100 .
- name: Hadolint
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
run: |
pipenv run pytest tests -n auto
poetry run pytest tests -n auto

View File

@@ -5,11 +5,14 @@ on:
types: [published]
env:
GITHUB_BRANCH: ${{ github.event.release.tag_name }}
RELEASE_TAG: ${{ github.event.release.tag_name }}
GITHUB_BRANCH: master
jobs:
release-prowler-job:
runs-on: ubuntu-latest
env:
POETRY_VIRTUALENVS_CREATE: "false"
name: Release Prowler to PyPI
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
@@ -22,20 +25,45 @@ jobs:
python-version: 3.9 #install the python needed
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build toml --upgrade
- name: Build package
run: python -m build
- name: Publish prowler-cloud package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_API_TOKEN }}
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Change version and Build package
run: |
poetry version ${{ env.RELEASE_TAG }}
git config user.name "github-actions"
git config user.email "<noreply@github.com>"
git add prowler/config/config.py pyproject.toml
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}"
git push -f origin ${{ env.RELEASE_TAG }}
poetry build
- name: Publish prowler package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
- name: Replicate PyPi Package
run: |
rm -rf ./dist && rm -rf ./build && rm -rf prowler_cloud.egg-info
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
python util/replicate_pypi_package.py
python -m build
- name: Publish prowler package to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
poetry build
- name: Publish prowler-cloud package to PyPI
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
# Create pull request with new version
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
password: ${{ secrets.PYPI_API_TOKEN }}
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
branch: release-${{ env.RELEASE_TAG }}
labels: "status/waiting-for-revision, severity/low"
title: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}"
body: |
### Description
This PR updates Prowler Version to ${{ env.RELEASE_TAG }}.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

View File

@@ -52,9 +52,9 @@ jobs:
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated"
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low"
title: "chore(regions_update): Changes in regions for AWS services."
body: |

View File

@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
rev: v4.4.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -13,14 +13,21 @@ repos:
- id: pretty-format-json
args: ["--autofix", --no-sort-keys, --no-ensure-ascii]
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.7.0
hooks:
- id: pretty-format-toml
args: [--autofix]
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.8.0
rev: v0.9.0
hooks:
- id: shellcheck
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v1.7.7
rev: v2.0.1
hooks:
- id: autoflake
args:
@@ -31,18 +38,18 @@ repos:
]
- repo: https://github.com/timothycrosley/isort
rev: 5.10.1
rev: 5.12.0
hooks:
- id: isort
args: ["--profile", "black"]
- repo: https://github.com/psf/black
rev: 22.10.0
rev: 23.1.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 5.0.4
rev: 6.0.0
hooks:
- id: flake8
exclude: contrib
@@ -54,7 +61,7 @@ repos:
- id: check-pipfile-lock
- repo: https://github.com/hadolint/hadolint
rev: v2.12.0
rev: v2.12.1-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013"]

23
.readthedocs.yaml Normal file
View File

@@ -0,0 +1,23 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.9"
jobs:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- pip install poetry
# Tell poetry to not use a virtual environment
- poetry config virtualenvs.create false
post_install:
- poetry install -E docs
mkdocs:
configuration: mkdocs.yml

View File

@@ -16,6 +16,7 @@ USER prowler
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler
# Install dependencies
ENV HOME='/home/prowler'
@@ -24,9 +25,9 @@ ENV PATH="$HOME/.local/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir .
# Remove Prowler directory
# Remove Prowler directory and build files
USER 0
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
USER prowler
ENTRYPOINT ["prowler"]

View File

@@ -24,11 +24,11 @@ lint: ## Lint Code
##@ PyPI
pypi-clean: ## Delete the distribution files
rm -rf ./dist && rm -rf ./build && rm -rf prowler_cloud.egg-info
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
pypi-build: ## Build package
$(MAKE) pypi-clean && \
python3 -m build
poetry build
pypi-upload: ## Upload package
python3 -m twine upload --repository pypi dist/*

42
Pipfile
View File

@@ -1,42 +0,0 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
colorama = "0.4.4"
boto3 = "1.26.3"
arnparse = "0.0.2"
botocore = "1.27.8"
pydantic = "1.9.1"
schema = "0.7.5"
shodan = "1.28.0"
detect-secrets = "1.4.0"
alive-progress = "2.4.1"
tabulate = "0.9.0"
azure-identity = "1.12.0"
azure-storage-blob = "12.14.1"
msgraph-core = "0.2.2"
azure-mgmt-subscription = "3.1.1"
azure-mgmt-authorization = "3.0.0"
azure-mgmt-security = "3.0.0"
azure-mgmt-storage = "21.0.0"
[dev-packages]
black = "22.10.0"
pylint = "2.16.1"
flake8 = "5.0.4"
bandit = "1.7.4"
safety = "2.3.1"
vulture = "2.7"
moto = "4.1.2"
docker = "6.0.0"
openapi-spec-validator = "0.5.5"
pytest = "7.2.1"
pytest-xdist = "2.5.0"
coverage = "7.1.0"
sure = "2.0.1"
freezegun = "1.2.1"
[requires]
python_version = "3.9"

1703
Pipfile.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -64,13 +64,13 @@ The container images are available here:
## From Github
Python >= 3.9 is required with pip and pipenv:
Python >= 3.9 is required with pip and poetry:
```
git clone https://github.com/prowler-cloud/prowler
cd prowler
pipenv shell
pipenv install
poetry shell
poetry install
python prowler.py -v
```

View File

@@ -5,7 +5,7 @@
# Prowler Documentation
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cloud/prowler/tree/2.12.0) to the branch and its README.md.
@@ -118,7 +118,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
./configure --enable-optimizations
sudo make altinstall
python3.9 --version
cd
cd
```
_Commands_:

View File

@@ -0,0 +1,31 @@
# Boto3 Retrier Configuration
Prowler's AWS Provider uses the Boto3 [Standard](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html) retry mode to assist in retrying client calls to AWS services when these kinds of errors or exceptions are experienced. This mode includes the following behaviours:
- A default value of 3 for maximum retry attempts. This can be overwritten with the `--aws-retries-max-attempts 5` argument.
- Retry attempts for an expanded list of errors/exceptions:
```
# Transient errors/exceptions
RequestTimeout
RequestTimeoutException
PriorRequestNotComplete
ConnectionError
HTTPClientError
# Service-side throttling/limit errors and exceptions
Throttling
ThrottlingException
ThrottledException
RequestThrottledException
TooManyRequestsException
ProvisionedThroughputExceededException
TransactionInProgressException
RequestLimitExceeded
BandwidthLimitExceeded
LimitExceededException
RequestThrottled
SlowDown
EC2ThrottledException
```
- Retry attempts on nondescriptive, transient error codes. Specifically, these HTTP status codes: 500, 502, 503, 504.
- Any retry attempt will include an exponential backoff by a base factor of 2 for a maximum backoff time of 20 seconds.

View File

@@ -5,6 +5,13 @@ Prowler uses the AWS SDK (Boto3) underneath so it uses the same authentication m
However, there are few ways to run Prowler against multiple accounts using IAM Assume Role feature depending on each use case:
1. You can just set up your custom profile inside `~/.aws/config` with all needed information about the role to assume then call it with `prowler aws -p/--profile your-custom-profile`.
- An example profile that performs role-chaining is given below. The `credential_source` can either be set to `Environment`, `Ec2InstanceMetadata`, or `EcsContainer`.
- Alternatively, you could use the `source_profile` instead of `credential_source` to specify a separate named profile that contains IAM user credentials with permission to assume the target the role. More information can be found [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html).
```
[profile crossaccountrole]
role_arn = arn:aws:iam::234567890123:role/SomeRole
credential_source = EcsContainer
```
2. You can use `-R`/`--role <role_arn>` and Prowler will get those temporary credentials using `Boto3` and run against that given account.
```sh
@@ -20,6 +27,6 @@ prowler aws -T/--session-duration <seconds> -I/--external-id <external_id> -R ar
To create a role to be assumed in one or multiple accounts you can use either as CloudFormation Stack or StackSet the following [template](https://github.com/prowler-cloud/prowler/blob/master/permissions/create_role_to_assume_cfn.yaml) and adapt it.
> _NOTE 1 about Session Duration_: Depending on the mount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
> _NOTE 1 about Session Duration_: Depending on the amount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
> _NOTE 2 about Session Duration_: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html).

View File

@@ -36,3 +36,12 @@ or for only one filtered region like eu-west-1:
Once you run findings for first time you will be able to see Prowler findings in Findings section:
![Screenshot 2020-10-29 at 10 29 05 PM](https://user-images.githubusercontent.com/3985464/97634676-66c9f600-1a36-11eb-9341-70feb06f6331.png)
## Skip sending updates of findings to Security Hub
By default, Prowler archives all its findings in Security Hub that have not appeared in the last scan.
You can skip this logic by using the option `--skip-sh-update` so Prowler will not archive older findings:
```sh
./prowler -S --skip-sh-update
```

View File

@@ -1,6 +1,6 @@
# Check mapping between Prowler v3 and v2
Prowler v3 comes with different identifiers but we maintained the same checks than v2. The reason of the change is because in previows versions of Prowler, check names were mostly based on CIS Benchmark for AWS, in v3 all checks are independent from any security framework and they have its own name and ID.
Prowler v3 comes with different identifiers but we maintained the same checks that were implemented in v2. The reason for this change is because in previows versions of Prowler, check names were mostly based on CIS Benchmark for AWS. In v3 all checks are independent from any security framework and they have its own name and ID.
If you need more information about how new compliance implementation works in Prowler v3 see [Compliance](../../compliance/) section.

View File

@@ -11,6 +11,24 @@ Currently, the available frameworks are:
- `cis_1.4_aws`
- `cis_1.5_aws`
- `ens_rd2022_aws`
- `aws_audit_manager_control_tower_guardrails_aws`
- `aws_foundational_security_best_practices_aws`
- `cisa_aws`
- `fedramp_low_revision_4_aws`
- `fedramp_moderate_revision_4_aws`
- `ffiec_aws`
- `gdpr_aws`
- `gxp_eu_annex_11_aws`
- `gxp_21_cfr_part_11_aws`
- `hipaa_aws`
- `nist_800_53_revision_4_aws`
- `nist_800_53_revision_5_aws`
- `nist_800_171_revision_2_aws`
- `nist_csf_1.1_aws`
- `pci_3.2.1_aws`
- `rbi_cyber_security_framework_aws`
- `soc2_aws`
## List Requirements of Compliance Frameworks
For each compliance framework, you can use option `--list-compliance-requirements` to list its requirements:

View File

@@ -1,16 +1,16 @@
# Logging
Prowler has a logging feature to be as transparent as possible so you can see every action that is going on will the tool is been executing.
Prowler has a logging feature to be as transparent as possible, so that you can see every action that is being performed whilst the tool is being executing.
## Set Log Level
## Set Log Level
There are different log levels depending on the logging information that is desired to be displayed:
- **DEBUG**: it will show low-level logs of Python.
- **INFO**: it will show all the API Calls that are being used in the provider.
- **WARNING**: it will show the resources that are being **allowlisted**.
- **ERROR**: it will show the errors, e.g., not authorized actions.
- **CRITICAL**: default log level, if a critical log appears, it will **exit** Prowlers execution.
- **DEBUG**: It will show low-level logs from Python.
- **INFO**: It will show all the API calls that are being invoked by the provider.
- **WARNING**: It will show all resources that are being **allowlisted**.
- **ERROR**: It will show any errors, e.g., not authorized actions.
- **CRITICAL**: The default log level. If a critical log appears, it will **exit** Prowlers execution.
You can establish the log level of Prowler with `--log-level` option:
@@ -20,9 +20,9 @@ prowler <provider> --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}
> By default, Prowler will run with the `CRITICAL` log level, since critical errors will abort the execution.
## Export Logs to File
## Export Logs to File
Prowler allows you to export the logs in json format with `--log-file` option:
Prowler allows you to export the logs in json format with the `--log-file` option:
```console
prowler <provider> --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL} --log-file <file_name>.json
@@ -45,4 +45,4 @@ An example of a log file will be the following:
"message": "eu-west-2 -- ClientError[124]: An error occurred (UnauthorizedOperation) when calling the DescribeNetworkAcls operation: You are not authorized to perform this operation."
}
> NOTE: Each finding is a `json` object.
> NOTE: Each finding is represented as a `json` object.

View File

@@ -25,19 +25,19 @@ repo_url: https://github.com/prowler-cloud/prowler/
repo_name: prowler-cloud/prowler
nav:
- Getting Started:
- Overview: index.md
- Requirements: getting-started/requirements.md
- Tutorials:
- Miscellaneous: tutorials/misc.md
- Reporting: tutorials/reporting.md
- Compliance: tutorials/compliance.md
- Quick Inventory: tutorials/quick-inventory.md
- Configuration File: tutorials/configuration_file.md
- Logging: tutorials/logging.md
- Allowlist: tutorials/allowlist.md
- Pentesting: tutorials/pentesting.md
- AWS:
- Getting Started:
- Overview: index.md
- Requirements: getting-started/requirements.md
- Tutorials:
- Miscellaneous: tutorials/misc.md
- Reporting: tutorials/reporting.md
- Compliance: tutorials/compliance.md
- Quick Inventory: tutorials/quick-inventory.md
- Configuration File: tutorials/configuration_file.md
- Logging: tutorials/logging.md
- Allowlist: tutorials/allowlist.md
- Pentesting: tutorials/pentesting.md
- AWS:
- Assume Role: tutorials/aws/role-assumption.md
- AWS Security Hub: tutorials/aws/securityhub.md
- AWS Organizations: tutorials/aws/organizations.md
@@ -46,14 +46,15 @@ nav:
- Checks v2 to v3 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
- Tag-based Scan: tutorials/aws/tag-based-scan.md
- Resource ARNs based Scan: tutorials/aws/resource-arn-based-scan.md
- Azure:
- Boto3 Configuration: tutorials/aws/boto3-configuration.md
- Azure:
- Authentication: tutorials/azure/authentication.md
- Subscriptions: tutorials/azure/subscriptions.md
- Security: security.md
- Contact Us: contact.md
- Troubleshooting: troubleshooting.md
- About: about.md
- ProwlerPro: https://prowler.pro
- Security: security.md
- Contact Us: contact.md
- Troubleshooting: troubleshooting.md
- About: about.md
- ProwlerPro: https://prowler.pro
# Customization
extra:
consent:

2516
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -10,6 +10,8 @@ from prowler.lib.check.check import (
exclude_checks_to_run,
exclude_services_to_run,
execute_checks,
get_checks_from_input_arn,
get_regions_from_audit_resources,
list_categories,
list_services,
print_categories,
@@ -99,9 +101,6 @@ def prowler():
)
sys.exit()
# Set the audit info based on the selected provider
audit_info = set_provider_audit_info(provider, args.__dict__)
# Load checks to execute
checks_to_execute = load_checks_to_execute(
bulk_checks_metadata,
@@ -113,7 +112,6 @@ def prowler():
compliance_framework,
categories,
provider,
audit_info,
)
# Exclude checks if -e/--excluded-checks
@@ -134,6 +132,18 @@ def prowler():
print_checks(provider, checks_to_execute, bulk_checks_metadata)
sys.exit()
# Set the audit info based on the selected provider
audit_info = set_provider_audit_info(provider, args.__dict__)
# Once the audit_info is set and we have the eventual checks from arn, it is time to exclude the others
if audit_info.audit_resources:
audit_info.audited_regions = get_regions_from_audit_resources(
audit_info.audit_resources
)
checks_to_execute = get_checks_from_input_arn(
audit_info.audit_resources, provider
)
# Parse content from Allowlist file and get it, if necessary, from S3
if provider == "aws" and args.allowlist_file:
allowlist_file = parse_allowlist_file(audit_info, args.allowlist_file)
@@ -197,7 +207,7 @@ def prowler():
)
# Resolve previous fails of Security Hub
if provider == "aws" and args.security_hub:
if provider == "aws" and args.security_hub and not args.skip_sh_update:
resolve_security_hub_previous_findings(args.output_directory, audit_info)
# Display summary table
@@ -210,14 +220,15 @@ def prowler():
)
if compliance_framework and findings:
# Display compliance table
display_compliance_table(
findings,
bulk_checks_metadata,
compliance_framework,
audit_output_options.output_filename,
audit_output_options.output_directory,
)
for compliance in compliance_framework:
# Display compliance table
display_compliance_table(
findings,
bulk_checks_metadata,
compliance,
audit_output_options.output_filename,
audit_output_options.output_directory,
)
# If there are failed findings exit code 3, except if -z is input
if not args.ignore_exit_code_3 and stats["total_fail"] > 0:

View File

@@ -0,0 +1,214 @@
{
"Framework": "AWS-Audit-Manager-Control-Tower-Guardrails",
"Version": "",
"Provider": "AWS",
"Description": "AWS Control Tower is a management and governance service that you can use to navigate through the setup process and governance requirements that are involved in creating a multi-account AWS environment.",
"Requirements": [
{
"Id": "1.0.1",
"Name": "Disallow launch of EC2 instance types that are not EBS-optimized",
"Description": "Checks whether EBS optimization is enabled for your EC2 instances that can be EBS-optimized",
"Attributes": [
{
"ItemId": "1.0.1",
"Section": "EBS checks",
"Service": "ebs"
}
],
"Checks": []
},
{
"Id": "1.0.2",
"Name": "Disallow EBS volumes that are unattached to an EC2 instance",
"Description": "Checks whether EBS volumes are attached to EC2 instances",
"Attributes": [
{
"ItemId": "1.0.2",
"Section": "EBS checks",
"Service": "ebs"
}
],
"Checks": []
},
{
"Id": "1.0.3",
"Name": "Enable encryption for EBS volumes attached to EC2 instances",
"Description": "Checks whether EBS volumes that are in an attached state are encrypted",
"Attributes": [
{
"ItemId": "1.0.3",
"Section": "EBS checks",
"Service": "ebs"
}
],
"Checks": [
"ec2_ebs_default_encryption"
]
},
{
"Id": "2.0.1",
"Name": "Disallow internet connection through RDP",
"Description": "Checks whether security groups that are in use disallow unrestricted incoming TCP traffic to the specified",
"Attributes": [
{
"ItemId": "2.0.1",
"Section": "Disallow Internet Connection",
"Service": "vpc"
}
],
"Checks": [
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389"
]
},
{
"Id": "2.0.2",
"Name": "Disallow internet connection through SSH",
"Description": "Checks whether security groups that are in use disallow unrestricted incoming SSH traffic.",
"Attributes": [
{
"ItemId": "2.0.2",
"Section": "Disallow Internet Connection",
"Service": "vpc"
}
],
"Checks": [
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22"
]
},
{
"Id": "3.0.1",
"Name": "Disallow access to IAM users without MFA",
"Description": "Checks whether the AWS Identity and Access Management users have multi-factor authentication (MFA) enabled.",
"Attributes": [
{
"ItemId": "3.0.1",
"Section": "Multi-Factor Authentication",
"Service": "iam"
}
],
"Checks": [
"iam_user_mfa_enabled_console_access"
]
},
{
"Id": "3.0.2",
"Name": "Disallow console access to IAM users without MFA",
"Description": "Checks whether AWS Multi-Factor Authentication (MFA) is enabled for all AWS Identity and Access Management (IAM) users that use a console password.",
"Attributes": [
{
"ItemId": "3.0.2",
"Section": "Multi-Factor Authentication",
"Service": "iam"
}
],
"Checks": [
"iam_user_mfa_enabled_console_access"
]
},
{
"Id": "3.0.3",
"Name": "Enable MFA for the root user",
"Description": "Checks whether the root user of your AWS account requires multi-factor authentication for console sign-in.",
"Attributes": [
{
"ItemId": "3.0.3",
"Section": "Multi-Factor Authentication",
"Service": "iam"
}
],
"Checks": [
"iam_root_mfa_enabled"
]
},
{
"Id": "4.0.1",
"Name": "Disallow public access to RDS database instances",
"Description": "Checks whether the Amazon Relational Database Service (RDS) instances are not publicly accessible. The rule is non-compliant if the publiclyAccessible field is true in the instance configuration item.",
"Attributes": [
{
"ItemId": "4.0.1",
"Section": "Disallow Public Access",
"Service": "rds"
}
],
"Checks": [
"rds_instance_no_public_access"
]
},
{
"Id": "4.0.2",
"Name": "Disallow public access to RDS database snapshots",
"Description": "Checks if Amazon Relational Database Service (Amazon RDS) snapshots are public. The rule is non-compliant if any existing and new Amazon RDS snapshots are public.",
"Attributes": [
{
"ItemId": "4.0.2",
"Section": "Disallow Public Access",
"Service": "rds"
}
],
"Checks": [
"rds_snapshots_public_access"
]
},
{
"Id": "4.1.1",
"Name": "Disallow public read access to S3 buckets",
"Description": "Checks that your S3 buckets do not allow public read access.",
"Attributes": [
{
"ItemId": "4.1.1",
"Section": "Disallow Public Access",
"Service": "s3"
}
],
"Checks": [
"rds_instance_no_public_access"
]
},
{
"Id": "4.1.2",
"Name": "Disallow public write access to S3 buckets",
"Description": "Checks that your S3 buckets do not allow public write access.",
"Attributes": [
{
"ItemId": "4.1.2",
"Section": "Disallow Public Access",
"Service": "s3"
}
],
"Checks": [
"s3_bucket_policy_public_write_access"
]
},
{
"Id": "5.0.1",
"Name": "Disallow RDS database instances that are not storage encrypted ",
"Description": "Checks whether storage encryption is enabled for your RDS DB instances.",
"Attributes": [
{
"ItemId": "5.0.1",
"Section": "Disallow Instances",
"Service": "rds"
}
],
"Checks": [
"rds_instance_storage_encrypted"
]
},
{
"Id": "5.1.1",
"Name": "Disallow S3 buckets that are not versioning enabled",
"Description": "Checks whether versioning is enabled for your S3 buckets.",
"Attributes": [
{
"ItemId": "5.1.1",
"Section": "Disallow Instances",
"Service": "s3"
}
],
"Checks": [
"s3_bucket_object_versioning"
]
}
]
}

View File

@@ -0,0 +1,604 @@
{
"Framework": "AWS-Foundational-Security-Best-Practices",
"Version": "",
"Provider": "AWS",
"Description": "The AWS Foundational Security Best Practices standard is a set of controls that detect when your deployed accounts and resources deviate from security best practices.",
"Requirements": [
{
"Id": "account",
"Name": "Account",
"Description": "This section contains recommendations for configuring AWS Account.",
"Attributes": [
{
"ItemId": "account",
"Section": "Account",
"Service": "account"
}
],
"Checks": [
"account_security_contact_information_is_registered"
]
},
{
"Id": "acm",
"Name": "ACM",
"Description": "This section contains recommendations for configuring ACM resources.",
"Attributes": [
{
"ItemId": "acm",
"Section": "Acm",
"Service": "acm"
}
],
"Checks": [
"account_security_contact_information_is_registered"
]
},
{
"Id": "api-gateway",
"Name": "API Gateway",
"Description": "This section contains recommendations for configuring API Gateway resources.",
"Attributes": [
{
"ItemId": "api-gateway",
"Section": "API Gateway",
"Service": "apigateway"
}
],
"Checks": [
"apigateway_logging_enabled",
"apigateway_client_certificate_enabled",
"apigateway_waf_acl_attached",
"apigatewayv2_authorizers_enabled",
"apigatewayv2_access_logging_enabled"
]
},
{
"Id": "auto-scaling",
"Name": "Benchmark: Auto Scaling",
"Description": "This section contains recommendations for configuring Auto Scaling resources and options.",
"Attributes": [
{
"ItemId": "auto-scaling",
"Section": "Auto Scaling",
"Service": "autoscaling"
}
],
"Checks": []
},
{
"Id": "cloudformation",
"Name": "Benchmark: CloudFormation",
"Description": "This section contains recommendations for configuring CloudFormation resources and options.",
"Attributes": [
{
"ItemId": "cloudformation",
"Section": "CloudFormation",
"Service": "cloudformation"
}
],
"Checks": []
},
{
"Id": "cloudfront",
"Name": "Benchmark: CloudFront",
"Description": "This section contains recommendations for configuring CloudFront resources and options.",
"Attributes": [
{
"ItemId": "cloudfront",
"Section": "CloudFront",
"Service": "cloudfront"
}
],
"Checks": [
"cloudfront_distributions_https_enabled",
"cloudfront_distributions_logging_enabled",
"cloudfront_distributions_using_waf",
"cloudfront_distributions_field_level_encryption_enabled",
"cloudfront_distributions_using_deprecated_ssl_protocols"
]
},
{
"Id": "cloudtrail",
"Name": "Benchmark: CloudTrail",
"Description": "This section contains recommendations for configuring CloudTrail resources and options.",
"Attributes": [
{
"ItemId": "cloudtrail",
"Section": "CloudTrail",
"Service": "cloudtrail"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"cloudtrail_cloudwatch_logging_enabled"
]
},
{
"Id": "codebuild",
"Name": "Benchmark: CodeBuild",
"Description": "This section contains recommendations for configuring CodeBuild resources and options.",
"Attributes": [
{
"ItemId": "codebuild",
"Section": "CodeBuild",
"Service": "codebuild"
}
],
"Checks": []
},
{
"Id": "config",
"Name": "Benchmark: Config",
"Description": "This section contains recommendations for configuring AWS Config.",
"Attributes": [
{
"ItemId": "config",
"Section": "Config",
"Service": "config"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "dms",
"Name": "Benchmark: DMS",
"Description": "This section contains recommendations for configuring AWS DMS resources and options.",
"Attributes": [
{
"ItemId": "dms",
"Section": "DMS",
"Service": "dms"
}
],
"Checks": []
},
{
"Id": "dynamodb",
"Name": "Benchmark: DynamoDB",
"Description": "This section contains recommendations for configuring AWS Dynamo DB resources and options.",
"Attributes": [
{
"ItemId": "dynamodb",
"Section": "DynamoDB",
"Service": "dynamodb"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_accelerator_cluster_encryption_enabled"
]
},
{
"Id": "ec2",
"Name": "Benchmark: EC2",
"Description": "This section contains recommendations for configuring EC2 resources and options.",
"Attributes": [
{
"ItemId": "ec2",
"Section": "EC2",
"Service": "ec2"
}
],
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_securitygroup_default_restrict_traffic",
"ec2_ebs_volume_encryption",
"ec2_instance_older_than_specific_days",
"vpc_flow_logs_enabled",
"ec2_ebs_default_encryption",
"ec2_instance_imdsv2_enabled",
"ec2_instance_public_ip",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_not_used"
]
},
{
"Id": "ecr",
"Name": "Benchmark: Elastic Container Registry",
"Description": "This section contains recommendations for configuring AWS ECR resources and options.",
"Attributes": [
{
"ItemId": "ecr",
"Section": "ECR",
"Service": "ecr"
}
],
"Checks": [
"ecr_repositories_scan_images_on_push_enabled",
"ecr_repositories_lifecycle_policy_enabled"
]
},
{
"Id": "ecs",
"Name": "Benchmark: Elastic Container Service",
"Description": "This section contains recommendations for configuring ECS resources and options.",
"Attributes": [
{
"ItemId": "ecs",
"Section": "ECS",
"Service": "ecs"
}
],
"Checks": [
"ecs_task_definitions_no_environment_secrets"
]
},
{
"Id": "efs",
"Name": "Benchmark: EFS",
"Description": "This section contains recommendations for configuring AWS EFS resources and options.",
"Attributes": [
{
"ItemId": "efs",
"Section": "EFS",
"Service": "efs"
}
],
"Checks": [
"efs_encryption_at_rest_enabled",
"efs_have_backup_enabled"
]
},
{
"Id": "eks",
"Name": "Benchmark: EKS",
"Description": "This section contains recommendations for configuring AWS EKS resources and options.",
"Attributes": [
{
"ItemId": "eks",
"Section": "EKS",
"Service": "eks"
}
],
"Checks": []
},
{
"Id": "elastic-beanstalk",
"Name": "Benchmark: Elastic Beanstalk",
"Description": "This section contains recommendations for configuring AWS Elastic Beanstalk resources and options.",
"Attributes": [
{
"ItemId": "elastic-beanstalk",
"Section": "Elastic Beanstalk",
"Service": "elasticbeanstalk"
}
],
"Checks": []
},
{
"Id": "elb",
"Name": "Benchmark: ELB",
"Description": "This section contains recommendations for configuring Elastic Load Balancer resources and options.",
"Attributes": [
{
"ItemId": "elb",
"Section": "ELB",
"Service": "elb"
}
],
"Checks": [
"elbv2_logging_enabled",
"elb_logging_enabled",
"elbv2_deletion_protection",
"elbv2_desync_mitigation_mode"
]
},
{
"Id": "elbv2",
"Name": "Benchmark: ELBv2",
"Description": "This section contains recommendations for configuring Elastic Load Balancer resources and options.",
"Attributes": [
{
"ItemId": "elbv2",
"Section": "ELBv2",
"Service": "elbv2"
}
],
"Checks": []
},
{
"Id": "emr",
"Name": "Benchmark: EMR",
"Description": "This section contains recommendations for configuring EMR resources.",
"Attributes": [
{
"ItemId": "emr",
"Section": "EMR",
"Service": "emr"
}
],
"Checks": [
"emr_cluster_master_nodes_no_public_ip"
]
},
{
"Id": "elasticsearch",
"Name": "Benchmark: Elasticsearch",
"Description": "This section contains recommendations for configuring Elasticsearch resources and options.",
"Attributes": [
{
"ItemId": "elasticsearch",
"Section": "ElasticSearch",
"Service": "elasticsearch"
}
],
"Checks": [
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"opensearch_service_domains_audit_logging_enabled",
"opensearch_service_domains_audit_logging_enabled",
"opensearch_service_domains_https_communications_enforced"
]
},
{
"Id": "guardduty",
"Name": "Benchmark: GuardDuty",
"Description": "This section contains recommendations for configuring AWS GuardDuty resources and options.",
"Attributes": [
{
"ItemId": "guardduty",
"Section": "GuardDuty",
"Service": "guardduty"
}
],
"Checks": [
"guardduty_is_enabled"
]
},
{
"Id": "iam",
"Name": "Benchmark: IAM",
"Description": "This section contains recommendations for configuring AWS IAM resources and options.",
"Attributes": [
{
"ItemId": "iam",
"Section": "IAM",
"Service": "iam"
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_rotate_access_key_90_days",
"iam_no_root_access_key",
"iam_user_mfa_enabled_console_access",
"iam_root_hardware_mfa_enabled",
"iam_password_policy_minimum_length_14",
"iam_disable_90_days_credentials",
"iam_policy_no_administrative_privileges"
]
},
{
"Id": "kinesis",
"Name": "Benchmark: Kinesis",
"Description": "This section contains recommendations for configuring AWS Kinesis resources and options.",
"Attributes": [
{
"ItemId": "kinesis",
"Section": "Kinesis",
"Service": "kinesis"
}
],
"Checks": []
},
{
"Id": "kms",
"Name": "Benchmark: KMS",
"Description": "This section contains recommendations for configuring AWS KMS resources and options.",
"Attributes": [
{
"ItemId": "kms",
"Section": "KMS",
"Service": "kms"
}
],
"Checks": []
},
{
"Id": "lambda",
"Name": "Benchmark: Lambda",
"Description": "This section contains recommendations for configuring Lambda resources and options.",
"Attributes": [
{
"ItemId": "lambda",
"Section": "Lambda",
"Service": "lambda"
}
],
"Checks": [
"awslambda_function_url_public",
"awslambda_function_using_supported_runtimes"
]
},
{
"Id": "network-firewall",
"Name": "Benchmark: Network Firewall",
"Description": "This section contains recommendations for configuring Network Firewall resources and options.",
"Attributes": [
{
"ItemId": "network-firewall",
"Section": "Network Firewall",
"Service": "network-firewall"
}
],
"Checks": []
},
{
"Id": "opensearch",
"Name": "Benchmark: OpenSearch",
"Description": "This section contains recommendations for configuring OpenSearch resources and options.",
"Attributes": [
{
"ItemId": "opensearch",
"Section": "OpenSearch",
"Service": "opensearch"
}
],
"Checks": [
"opensearch_service_domains_not_publicly_accessible"
]
},
{
"Id": "rds",
"Name": "Benchmark: RDS",
"Description": "This section contains recommendations for configuring AWS RDS resources and options.",
"Attributes": [
{
"ItemId": "rds",
"Section": "RDS",
"Service": "rds"
}
],
"Checks": [
"rds_snapshots_public_access",
"rds_instance_no_public_access",
"rds_instance_storage_encrypted",
"rds_instance_storage_encrypted",
"rds_instance_multi_az",
"rds_instance_enhanced_monitoring_enabled",
"rds_instance_deletion_protection",
"rds_instance_integration_cloudwatch_logs",
"rds_instance_minor_version_upgrade_enabled",
"rds_instance_multi_az"
]
},
{
"Id": "redshift",
"Name": "Benchmark: Redshift",
"Description": "This section contains recommendations for configuring AWS Redshift resources and options.",
"Attributes": [
{
"ItemId": "redshift",
"Section": "Redshift",
"Service": "redshift"
}
],
"Checks": [
"redshift_cluster_public_access",
"redshift_cluster_automated_snapshot",
"redshift_cluster_automated_snapshot",
"redshift_cluster_automatic_upgrades"
]
},
{
"Id": "s3",
"Name": "Benchmark: S3",
"Description": "This section contains recommendations for configuring AWS S3 resources and options.",
"Attributes": [
{
"ItemId": "s3",
"Section": "S3",
"Service": "s3"
}
],
"Checks": [
"s3_account_level_public_access_blocks",
"s3_account_level_public_access_blocks",
"s3_bucket_policy_public_write_access",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_public_access",
"s3_bucket_server_access_logging_enabled",
"s3_bucket_object_versioning",
"s3_bucket_acl_prohibited"
]
},
{
"Id": "sagemaker",
"Name": "Benchmark: SageMaker",
"Description": "This section contains recommendations for configuring AWS Sagemaker resources and options.",
"Attributes": [
{
"ItemId": "sagemaker",
"Section": "SageMaker",
"Service": "sagemaker"
}
],
"Checks": [
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"sagemaker_notebook_instance_vpc_settings_configured",
"sagemaker_notebook_instance_root_access_disabled"
]
},
{
"Id": "secretsmanager",
"Name": "Benchmark: Secrets Manager",
"Description": "This section contains recommendations for configuring AWS Secrets Manager resources.",
"Attributes": [
{
"ItemId": "secretsmanager",
"Section": "Secrets Manager",
"Service": "secretsmanager"
}
],
"Checks": [
"secretsmanager_automatic_rotation_enabled",
"secretsmanager_automatic_rotation_enabled"
]
},
{
"Id": "sns",
"Name": "Benchmark: SNS",
"Description": "This section contains recommendations for configuring AWS SNS resources and options.",
"Attributes": [
{
"ItemId": "sns",
"Section": "SNS",
"Service": "sns"
}
],
"Checks": [
"sns_topics_kms_encryption_at_rest_enabled"
]
},
{
"Id": "sqs",
"Name": "Benchmark: SQS",
"Description": "This section contains recommendations for configuring AWS SQS resources and options.",
"Attributes": [
{
"ItemId": "sqs",
"Section": "SQS",
"Service": "sqs"
}
],
"Checks": [
"sqs_queues_server_side_encryption_enabled"
]
},
{
"Id": "ssm",
"Name": "Benchmark: SSM",
"Description": "This section contains recommendations for configuring AWS Systems Manager resources and options.",
"Attributes": [
{
"ItemId": "ssm",
"Section": "SSM",
"Service": "ssm"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching"
]
},
{
"Id": "waf",
"Name": "Benchmark: WAF",
"Description": "This section contains recommendations for configuring AWS WAF resources and options.",
"Attributes": [
{
"ItemId": "waf",
"Section": "WAF",
"Service": "waf"
}
],
"Checks": []
}
]
}

View File

@@ -1,6 +1,8 @@
{
"Framework": "CIS-AWS",
"Framework": "CIS",
"Version": "1.4",
"Provider": "AWS",
"Description": "The CIS Benchmark for CIS Amazon Web Services Foundations Benchmark, v1.4.0, Level 1 and 2 provides prescriptive guidance for configuring security options for a subset of Amazon Web Services. It has an emphasis on foundational, testable, and architecture agnostic settings",
"Requirements": [
{
"Id": "1.1",
@@ -258,7 +260,7 @@
"Id": "1.20",
"Description": "Ensure that IAM Access analyzer is enabled for all regions",
"Checks": [
"accessanalyzer_enabled_without_findings"
"accessanalyzer_enabled"
],
"Attributes": [
{

View File

@@ -1,6 +1,8 @@
{
"Framework": "CIS-AWS",
"Framework": "CIS",
"Version": "1.5",
"Provider": "AWS",
"Description": "The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services with an emphasis on foundational, testable, and architecture agnostic settings.",
"Requirements": [
{
"Id": "1.1",
@@ -258,7 +260,7 @@
"Id": "1.20",
"Description": "Ensure that IAM Access analyzer is enabled for all regions",
"Checks": [
"accessanalyzer_enabled_without_findings"
"accessanalyzer_enabled"
],
"Attributes": [
{

View File

@@ -0,0 +1,421 @@
{
"Framework": "CISA",
"Version": "",
"Provider": "AWS",
"Description": "Cybersecurity & Infrastructure Security Agency's (CISA) Cyber Essentials is a guide for leaders of small businesses as well as leaders of small and local government agencies to develop an actionable understanding of where to start implementing organizational cybersecurity practices.",
"Requirements": [
{
"Id": "your-systems-1",
"Name": "Your Systems-1",
"Description": "Learn what is on your network. Maintain inventories of hardware and software assets to know what is in play and at-risk from attack.",
"Attributes": [
{
"ItemId": "your-systems-1",
"Section": "your systems",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ec2_instance_older_than_specific_days",
"ssm_managed_compliant_patching",
"ec2_elastic_ip_unassgined"
]
},
{
"Id": "your-systems-2",
"Name": "Your Systems-2",
"Description": "Leverage automatic updates for all operating systems and third-party software.",
"Attributes": [
{
"ItemId": "your-systems-2",
"Section": "your systems",
"Service": "aws"
}
],
"Checks": [
"rds_instance_minor_version_upgrade_enabled",
"redshift_cluster_automatic_upgrades",
"ssm_managed_compliant_patching"
]
},
{
"Id": "your-systems-3",
"Name": "Your Systems-3",
"Description": "Implement security configurations for all hardware and software assets.",
"Attributes": [
{
"ItemId": "your-systems-3",
"Section": "your systems",
"Service": "aws"
}
],
"Checks": [
"apigateway_client_certificate_enabled",
"apigateway_logging_enabled",
"apigateway_waf_acl_attached",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"codebuild_project_user_controlled_buildspec",
"dynamodb_accelerator_cluster_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_public_snapshot",
"ec2_ebs_default_encryption",
"ec2_instance_public_ip",
"efs_encryption_at_rest_enabled",
"efs_have_backup_enabled",
"elb_logging_enabled",
"elbv2_deletion_protection",
"elbv2_waf_acl_attached",
"elbv2_ssl_listeners",
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_no_custom_policy_permissive_role_assumption",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access",
"iam_disable_90_days_credentials",
"kms_cmk_rotation_enabled",
"awslambda_function_not_publicly_accessible",
"awslambda_function_not_publicly_accessible",
"cloudwatch_log_group_kms_encryption_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_enhanced_monitoring_enabled",
"rds_instance_backup_enabled",
"rds_instance_deletion_protection",
"rds_instance_storage_encrypted",
"rds_instance_backup_enabled",
"rds_instance_integration_cloudwatch_logs",
"rds_instance_multi_az",
"rds_instance_no_public_access",
"rds_instance_storage_encrypted",
"rds_snapshots_public_access",
"redshift_cluster_automated_snapshot",
"redshift_cluster_audit_logging",
"redshift_cluster_public_access",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_server_access_logging_enabled",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_bucket_object_versioning",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"sagemaker_notebook_instance_encryption_enabled",
"secretsmanager_automatic_rotation_enabled",
"securityhub_enabled",
"sns_topics_kms_encryption_at_rest_enabled",
"vpc_endpoint_connections_trust_boundaries",
"ec2_securitygroup_default_restrict_traffic",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port"
]
},
{
"Id": "your_-urroundings-1",
"Name": "Your Surroundings-1",
"Description": "Learn who is on your network. Maintain inventories of network connections (user accounts, vendors, business partners, etc.).",
"Attributes": [
{
"ItemId": "your-surroundings-1",
"Section": "your surroundings",
"Service": "aws"
}
],
"Checks": [
"ec2_elastic_ip_unassgined",
"vpc_flow_logs_enabled"
]
},
{
"Id": "your-surroundings-2",
"Name": "Your Surroundings-2",
"Description": "Leverage multi-factor authentication for all users, starting with privileged, administrative and remote access users.",
"Attributes": [
{
"ItemId": "your-surroundings-2",
"Section": "your surroundings",
"Service": "aws"
}
],
"Checks": [
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access"
]
},
{
"Id": "your-surroundings-3",
"Name": "Your Surroundings-3",
"Description": "Grant access and admin permissions based on need-to-know and least privilege.",
"Attributes": [
{
"ItemId": "your-surroundings-3",
"Section": "your surroundings",
"Service": "aws"
}
],
"Checks": [
"elbv2_ssl_listeners",
"iam_no_custom_policy_permissive_role_assumption",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
{
"Id": "your-surroundings-4",
"Name": "Your Surroundings-4",
"Description": "Leverage unique passwords for all user accounts.",
"Attributes": [
{
"ItemId": "your-surroundings-4",
"Section": "your surroundings",
"Service": "aws"
}
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase"
]
},
{
"Id": "your-data-1",
"Name": "Your Data-1",
"Description": "Learn how your data is protected.",
"Attributes": [
{
"ItemId": "your-data-1",
"Section": "your data",
"Service": "aws"
}
],
"Checks": [
"efs_encryption_at_rest_enabled",
"cloudtrail_kms_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"opensearch_service_domains_encryption_at_rest_enabled",
"rds_instance_storage_encrypted",
"rds_instance_storage_encrypted",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
},
{
"Id": "your-data-2",
"Name": "Your Data-2",
"Description": "Learn what is happening on your network, manage network and perimeter components, host and device components, data-at-rest and in-transit, and user behavior activities.",
"Attributes": [
{
"ItemId": "your-data-2",
"Section": "your data",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"apigateway_client_certificate_enabled",
"apigateway_logging_enabled",
"efs_have_backup_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_instance_public_ip",
"efs_encryption_at_rest_enabled",
"elb_logging_enabled",
"elbv2_waf_acl_attached",
"elbv2_ssl_listeners",
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"awslambda_function_not_publicly_accessible",
"awslambda_function_not_publicly_accessible",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_storage_encrypted",
"rds_instance_integration_cloudwatch_logs",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"rds_snapshots_public_access",
"redshift_cluster_audit_logging",
"redshift_cluster_public_access",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"redshift_cluster_public_access",
"s3_bucket_server_access_logging_enabled",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_acl_prohibited",
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled",
"ec2_securitygroup_default_restrict_traffic",
"vpc_flow_logs_enabled",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port"
]
},
{
"Id": "your-data-3",
"Name": "Your Data-3",
"Description": "Domain name system protection.",
"Attributes": [
{
"ItemId": "your-data-3",
"Section": "your data",
"Service": "aws"
}
],
"Checks": [
"elbv2_waf_acl_attached"
]
},
{
"Id": "your-data-4",
"Name": "Your Data-4",
"Description": "Establish regular automated backups and redundancies of key systems.",
"Attributes": [
{
"ItemId": "your-data-4",
"Section": "your data",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"elbv2_deletion_protection",
"rds_instance_backup_enabled",
"rds_instance_deletion_protection",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "your-data-5",
"Name": "Your Data-5",
"Description": "Leverage protections for backups, including physical security, encryption and offline copies.",
"Attributes": [
{
"ItemId": "your-data-5",
"Section": "your data",
"Service": "aws"
}
],
"Checks": []
},
{
"Id": "your-crisis-response-2",
"Name": "Your Crisis Response-2",
"Description": "Lead development of an internal reporting structure to detect, communicate and contain attacks.",
"Attributes": [
{
"ItemId": "your-crisis-response-2",
"Section": "your crisis response",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "booting-up-thing-to-do-first-1",
"Name": "YBooting Up: Things to Do First-1",
"Description": "Lead development of an internal reporting structure to detect, communicate and contain attacks.",
"Attributes": [
{
"ItemId": "booting-up-thing-to-do-first-1",
"Section": "booting up thing to do first",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "booting-up-thing-to-do-first-2",
"Name": "YBooting Up: Things to Do First-2",
"Description": "Require multi-factor authentication (MFA) for accessing your systems whenever possible. MFA should be required of all users, but start with privileged, administrative, and remote access users.",
"Attributes": [
{
"ItemId": "booting-up-thing-to-do-first-2",
"Section": "booting up thing to do first",
"Service": "aws"
}
],
"Checks": [
"iam_user_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access",
"iam_user_hardware_mfa_enabled"
]
},
{
"Id": "booting-up-thing-to-do-first-3",
"Name": "YBooting Up: Things to Do First-3",
"Description": "Enable automatic updates whenever possible. Replace unsupported operating systems, applications and hardware. Test and deploy patches quickly.",
"Attributes": [
{
"ItemId": "booting-up-thing-to-do-first-1",
"Section": "booting up thing to do first",
"Service": "aws"
}
],
"Checks": [
"rds_instance_minor_version_upgrade_enabled",
"redshift_cluster_automatic_upgrades",
"ssm_managed_compliant_patching"
]
}
]
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,440 @@
{
"Framework": "FedRAMP-Low-Revision-4",
"Version": "",
"Provider": "AWS",
"Description": "The Federal Risk and Authorization Management Program (FedRAMP) was established in 2011. It provides a cost-effective, risk-based approach for the adoption and use of cloud services by the U.S. federal government. FedRAMP empowers federal agencies to use modern cloud technologies, with an emphasis on the security and protection of federal information.",
"Requirements": [
{
"Id": "ac-2",
"Name": "Account Management (AC-2)",
"Description": "Manage system accounts, group memberships, privileges, workflow, notifications, deactivations, and authorizations.",
"Attributes": [
{
"ItemId": "ac-2",
"Section": "Access Control (AC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_log_file_validation_enabled",
"cloudwatch_changes_to_network_acls_alarm_configured",
"opensearch_service_domains_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_user_mfa_enabled_console_access",
"iam_user_hardware_mfa_enabled",
"iam_disable_90_days_credentials",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled"
]
},
{
"Id": "ac-3",
"Name": "Account Management (AC-3)",
"Description": "The information system enforces approved authorizations for logical access to information and system resources in accordance with applicable access control policies.",
"Attributes": [
{
"ItemId": "ac-3",
"Section": "Access Control (AC)",
"Service": "aws"
}
],
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured"
]
},
{
"Id": "ac-17",
"Name": "Remote Access (AC-17)",
"Description": "Authorize remote access systems prior to connection. Enforce remote connection requirements to information systems.",
"Attributes": [
{
"ItemId": "ac-17",
"Section": "Access Control (AC)",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"guardduty_is_enabled",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_secure_transport_policy",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"securityhub_enabled",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "au-2",
"Name": "Audit Events (AU-2)",
"Description": "The organization: a. Determines that the information system is capable of auditing the following events: [Assignment: organization-defined auditable events]; b. Coordinates the security audit function with other organizational entities requiring audit- related information to enhance mutual support and to help guide the selection of auditable events; c. Provides a rationale for why the auditable events are deemed to be adequate support after- the-fact investigations of security incidents",
"Attributes": [
{
"ItemId": "au-2",
"Section": "Audit and Accountability (AU)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_log_file_validation_enabled",
"elbv2_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "au-9",
"Name": "Protection of Audit Information (AU-9)",
"Description": "The information system protects audit information and audit tools from unauthorized access, modification, and deletion.",
"Attributes": [
{
"ItemId": "au-9",
"Section": "Audit and Accountability (AU)",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"s3_bucket_object_versioning"
]
},
{
"Id": "au-11",
"Name": "Audit Record Retention (AU-11)",
"Description": "The organization retains audit records for at least 90 days to provide support for after-the-fact investigations of security incidents and to meet regulatory and organizational information retention requirements.",
"Attributes": [
{
"ItemId": "au-11",
"Section": "Audit and Accountability (AU)",
"Service": "aws"
}
],
"Checks": [
"cloudwatch_log_group_retention_policy_specific_days_enabled"
]
},
{
"Id": "ca-7",
"Name": "Continuous Monitoring (CA-7)",
"Description": "Continuously monitor configuration management processes. Determine security impact, environment and operational risks.",
"Attributes": [
{
"ItemId": "ca-7",
"Section": "Security Assessment And Authorization (CA)",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudwatch_changes_to_network_acls_alarm_configured",
"ec2_instance_imdsv2_enabled",
"elbv2_waf_acl_attached",
"guardduty_is_enabled",
"rds_instance_enhanced_monitoring_enabled",
"redshift_cluster_audit_logging",
"securityhub_enabled"
]
},
{
"Id": "cm-2",
"Name": "Baseline Configuration (CM-2)",
"Description": "The organization develops, documents, and maintains under configuration control, a current baseline configuration of the information system.",
"Attributes": [
{
"ItemId": "cm-2",
"Section": "Configuration Management (CM)",
"Service": "aws"
}
],
"Checks": [
"apigateway_waf_acl_attached",
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"ec2_instance_older_than_specific_days",
"elbv2_deletion_protection",
"emr_cluster_master_nodes_no_public_ip",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"ssm_managed_compliant_patching",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "cm-8",
"Name": "Information System Component Inventory (CM-8)",
"Description": "The organization develops and documents an inventory of information system components that accurately reflects the current information system, includes all components within the authorization boundary of the information system, is at the level of granularity deemed necessary for tracking and reporting and reviews and updates the information system component inventory.",
"Attributes": [
{
"ItemId": "cm-8",
"Section": "Configuration Management (CM)",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"guardduty_is_enabled",
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching"
]
},
{
"Id": "cp-9",
"Name": "Information System Backup (CP-9)",
"Description": "The organization conducts backups of user-level information, system-level information and information system documentation including security-related documentation contained in the information system and protects the confidentiality, integrity, and availability of backup information at storage locations.",
"Attributes": [
{
"ItemId": "cp-9",
"Section": "Contingency Planning (CP)",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "cp-10",
"Name": "Information System Recovery And Reconstitution (CP-10)",
"Description": "The organization provides for the recovery and reconstitution of the information system to a known state after a disruption, compromise, or failure.",
"Attributes": [
{
"ItemId": "cp-10",
"Section": "Contingency Planning (CP)",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"elbv2_deletion_protection",
"rds_instance_backup_enabled",
"rds_instance_multi_az",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "ia-2",
"Name": "Identification and Authentication (Organizational users) (IA-2)",
"Description": "The information system uniquely identifies and authenticates organizational users (or processes acting on behalf of organizational users).",
"Attributes": [
{
"ItemId": "ia-2",
"Section": "Identification and Authentication (IA)",
"Service": "aws"
}
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access"
]
},
{
"Id": "ir-4",
"Name": "Incident Handling (IR-4)",
"Description": "The organization implements an incident handling capability for security incidents that includes preparation, detection and analysis, containment, eradication, and recovery, coordinates incident handling activities with contingency planning activities and incorporates lessons learned from ongoing incident handling activities into incident response procedures, training, and testing, and implements the resulting changes accordingly.",
"Attributes": [
{
"ItemId": "ir-4",
"Section": "Incident Response (IR)",
"Service": "aws"
}
],
"Checks": [
"cloudwatch_changes_to_network_acls_alarm_configured",
"cloudwatch_changes_to_network_gateways_alarm_configured",
"cloudwatch_changes_to_network_route_tables_alarm_configured",
"cloudwatch_changes_to_vpcs_alarm_configured",
"guardduty_is_enabled",
"guardduty_no_high_severity_findings",
"securityhub_enabled"
]
},
{
"Id": "sa-3",
"Name": "System Development Life Cycle (SA-3)",
"Description": "The organization manages the information system using organization-defined system development life cycle, defines and documents information security roles and responsibilities throughout the system development life cycle, identifies individuals having information security roles and responsibilities and integrates the organizational information security risk management process into system development life cycle activities.",
"Attributes": [
{
"ItemId": "sa-3",
"Section": "System and Services Acquisition (SA)",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_managed_by_ssm"
]
},
{
"Id": "sc-5",
"Name": "Denial Of Service Protection (SC-5)",
"Description": "The information system protects against or limits the effects of the following types of denial of service attacks: [Assignment: organization-defined types of denial of service attacks or references to sources for such information] by employing [Assignment: organization-defined security safeguards].",
"Attributes": [
{
"ItemId": "sc-5",
"Section": "System and Communications Protection (SC)",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"elbv2_deletion_protection",
"guardduty_is_enabled",
"rds_instance_backup_enabled",
"rds_instance_deletion_protection",
"rds_instance_multi_az",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "sc-7",
"Name": "Boundary Protection (SC-7)",
"Description": "The information system: a. Monitors and controls communications at the external boundary of the system and at key internal boundaries within the system; b. Implements subnetworks for publicly accessible system components that are [Selection: physically; logically] separated from internal organizational networks; and c. Connects to external networks or information systems only through managed interfaces consisting of boundary protection devices arranged in accordance with an organizational security architecture.",
"Attributes": [
{
"ItemId": "sc-7",
"Section": "System and Communications Protection (SC)",
"Service": "aws"
}
],
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"elbv2_waf_acl_attached",
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_node_to_node_encryption_enabled",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_secure_transport_policy",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "sc-12",
"Name": "Cryptographic Key Establishment And Management (SC-12)",
"Description": "The organization establishes and manages cryptographic keys for required cryptography employed within the information system in accordance with [Assignment: organization-defined requirements for key generation, distribution, storage, access, and destruction].",
"Attributes": [
{
"ItemId": "sc-12",
"Section": "System and Communications Protection (SC)",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"kms_cmk_rotation_enabled"
]
},
{
"Id": "sc-13",
"Name": "Use of Cryptography (SC-13)",
"Description": "The information system implements FIPS-validated or NSA-approved cryptography in accordance with applicable federal laws, Executive Orders, directives, policies, regulations, and standards.",
"Attributes": [
{
"ItemId": "sc-13",
"Section": "System and Communications Protection (SC)",
"Service": "aws"
}
],
"Checks": [
"s3_bucket_default_encryption",
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
}
]
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,902 @@
{
"Framework": "FFIEC",
"Version": "",
"Provider": "AWS",
"Description": "In light of the increasing volume and sophistication of cyber threats, the Federal Financial Institutions Examination Council (FFIEC) developed the Cybersecurity Assessment Tool (Assessment), on behalf of its members, to help institutions identify their risks and determine their cybersecurity maturity.",
"Requirements": [
{
"Id": "d1-g-it-b-1",
"Name": "D1.G.IT.B.1",
"Description": "An inventory of organizational assets (e.g., hardware, software, data, and systems hosted externally) is maintained.",
"Attributes": [
{
"ItemId": "d1-g-it-b-1",
"Section": "Cyber Risk Management and Oversight (Domain 1)",
"SubSection": "Governance (G)",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ec2_instance_older_than_specific_days",
"ec2_elastic_ip_unassgined"
]
},
{
"Id": "d1-rm-ra-b-2",
"Name": "D1.RM.RA.B.2",
"Description": "The risk assessment identifies Internet- based systems and high-risk transactions that warrant additional authentication controls.",
"Attributes": [
{
"ItemId": "d1-rm-ra-b-2",
"Section": "Cyber Risk Management and Oversight (Domain 1)",
"SubSection": "Risk Management (RM)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled"
]
},
{
"Id": "d1-rm-rm-b-1",
"Name": "D1.RM.Rm.B.1",
"Description": "An information security and business continuity risk management function(s) exists within the institution.",
"Attributes": [
{
"ItemId": "d1-rm-rm-b-1",
"Section": "Cyber Risk Management and Oversight (Domain 1)",
"SubSection": "Risk Management (RM)",
"Service": "aws"
}
],
"Checks": [
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_multi_az",
"redshift_cluster_automated_snapshot"
]
},
{
"Id": "d2-is-is-b-1",
"Name": "D2.IS.Is.B.1",
"Description": "Information security threats are gathered and shared with applicable internal employees.",
"Attributes": [
{
"ItemId": "d2-is-is-b-1",
"Section": "Threat Intelligence and Collaboration (Domain 2)",
"SubSection": "Information Sharing (IS)",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "d2-ma-ma-b-1",
"Name": "D2.MA.Ma.B.1",
"Description": "Information security threats are gathered and shared with applicable internal employees.",
"Attributes": [
{
"ItemId": "d2-ma-ma-b-1",
"Section": "Threat Intelligence and Collaboration (Domain 2)",
"SubSection": "Monitoring and Analyzing (MA)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudwatch_log_group_retention_policy_specific_days_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "d2-ma-ma-b-2",
"Name": "D2.MA.Ma.B.2",
"Description": "Computer event logs are used for investigations once an event has occurred.",
"Attributes": [
{
"ItemId": "d2-ma-ma-b-2",
"Section": "Threat Intelligence and Collaboration (Domain 2)",
"SubSection": "Monitoring and Analyzing (MA)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "d2-ti-ti-b-1",
"Name": "D2.TI.Ti.B.1",
"Description": "The institution belongs or subscribes to a threat and vulnerability information-sharing source(s) that provides information on threats (e.g., FS-ISAC, US- CERT).",
"Attributes": [
{
"ItemId": "d2-ti-ti-b-1",
"Section": "Threat Intelligence and Collaboration (Domain 2)",
"SubSection": "Threat Intelligence (TI)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "d2-ti-ti-b-2",
"Name": "D2.TI.Ti.B.2",
"Description": "Threat information is used to monitor threats and vulnerabilities.",
"Attributes": [
{
"ItemId": "d2-ti-ti-b-2",
"Section": "Threat Intelligence and Collaboration (Domain 2)",
"SubSection": "Threat Intelligence (TI)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled",
"ssm_managed_compliant_patching"
]
},
{
"Id": "d2-ti-ti-b-3",
"Name": "D2.TI.Ti.B.3",
"Description": "Threat information is used to enhance internal risk management and controls.",
"Attributes": [
{
"ItemId": "d2-ti-ti-b-3",
"Section": "Threat Intelligence and Collaboration (Domain 2)",
"SubSection": "Threat Intelligence (TI)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "d3-cc-pm-b-1",
"Name": "D3.CC.PM.B.1",
"Description": "A patch management program is implemented and ensures that software and firmware patches are applied in a timely manner.",
"Attributes": [
{
"ItemId": "d3-cc-pm-b-1",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Corrective Controls (CC)",
"Service": "aws"
}
],
"Checks": [
"rds_instance_minor_version_upgrade_enabled",
"redshift_cluster_automatic_upgrades",
"ssm_managed_compliant_patching"
]
},
{
"Id": "d3-cc-pm-b-3",
"Name": "D3.CC.PM.B.3",
"Description": "Patch management reports are reviewed and reflect missing security patches.",
"Attributes": [
{
"ItemId": "d3-cc-pm-b-3",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Corrective Controls (CC)",
"Service": "aws"
}
],
"Checks": [
"rds_instance_minor_version_upgrade_enabled",
"redshift_cluster_automatic_upgrades",
"ssm_managed_compliant_patching"
]
},
{
"Id": "d3-dc-an-b-1",
"Name": "D3.DC.An.B.1",
"Description": "The institution is able to detect anomalous activities through monitoring across the environment.",
"Attributes": [
{
"ItemId": "d3-dc-an-b-1",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"guardduty_no_high_severity_findings",
"securityhub_enabled"
]
},
{
"Id": "d3-dc-an-b-2",
"Name": "D3.DC.An.B.2",
"Description": "Customer transactions generating anomalous activity alerts are monitored and reviewed.",
"Attributes": [
{
"ItemId": "d3-dc-an-b-2",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "d3-dc-an-b-3",
"Name": "D3.DC.An.B.3",
"Description": "Logs of physical and/or logical access are reviewed following events.",
"Attributes": [
{
"ItemId": "d3-dc-an-b-3",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"s3_bucket_server_access_logging_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "d3-dc-an-b-4",
"Name": "D3.DC.An.B.4",
"Description": "Access to critical systems by third parties is monitored for unauthorized or unusual activity.",
"Attributes": [
{
"ItemId": "d3-dc-an-b-4",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "d3-dc-an-b-5",
"Name": "D3.DC.An.B.5",
"Description": "Elevated privileges are monitored.",
"Attributes": [
{
"ItemId": "d3-dc-an-b-5",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled"
]
},
{
"Id": "d3-dc-ev-b-1",
"Name": "D3.DC.Ev.B.1",
"Description": "A normal network activity baseline is established.",
"Attributes": [
{
"ItemId": "d3-dc-ev-b-1",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"redshift_cluster_audit_logging",
"vpc_flow_logs_enabled"
]
},
{
"Id": "d3-dc-ev-b-2",
"Name": "D3.DC.Ev.B.2",
"Description": "Mechanisms (e.g., antivirus alerts, log event alerts) are in place to alert management to potential attacks.",
"Attributes": [
{
"ItemId": "d3-dc-ev-b-2",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled"
]
},
{
"Id": "d3-dc-ev-b-3",
"Name": "D3.DC.Ev.B.3",
"Description": "Processes are in place to monitor for the presence of unauthorized users, devices, connections, and software.",
"Attributes": [
{
"ItemId": "d3-dc-ev-b-3",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"guardduty_is_enabled",
"securityhub_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "d3-dc-th-b-1",
"Name": "D3.DC.Th.B.1",
"Description": "Independent testing (including penetration testing and vulnerability scanning) is conducted according to the risk assessment for external-facing systems and the internal network.",
"Attributes": [
{
"ItemId": "d3-dc-th-b-1",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Detective Controls (DC)",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled",
"ssm_managed_compliant_patching"
]
},
{
"Id": "d3-pc-am-b-1",
"Name": "D3.PC.Am.B.1",
"Description": "Employee access is granted to systems and confidential data based on job responsibilities and the principles of least privilege.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-1",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_profile_attached",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
{
"Id": "d3-pc-am-b-10",
"Name": "D3.PC.Am.B.10",
"Description": "Production and non-production environments are segregated to prevent unauthorized access or changes to information assets. (*N/A if no production environment exists at the institution or the institution's third party.)",
"Attributes": [
{
"ItemId": "d3-pc-am-b-10",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "d3-pc-am-b-12",
"Name": "D3.PC.Am.B.12",
"Description": "All passwords are encrypted in storage and in transit.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-12",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_client_certificate_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"efs_encryption_at_rest_enabled",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"rds_instance_storage_encrypted",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy"
]
},
{
"Id": "d3-pc-am-b-13",
"Name": "D3.PC.Am.B.13",
"Description": "Confidential data is encrypted when transmitted across public or untrusted networks (e.g., Internet).",
"Attributes": [
{
"ItemId": "d3-pc-am-b-13",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_client_certificate_enabled",
"elbv2_insecure_ssl_ciphers",
"elb_ssl_listeners",
"s3_bucket_secure_transport_policy"
]
},
{
"Id": "d3-pc-am-b-15",
"Name": "D3.PC.Am.B.15",
"Description": "Remote access to critical systems by employees, contractors, and third parties uses encrypted connections and multifactor authentication.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-15",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_client_certificate_enabled",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access",
"s3_bucket_secure_transport_policy"
]
},
{
"Id": "d3-pc-am-b-16",
"Name": "D3.PC.Am.B.16",
"Description": "Administrative, physical, or technical controls are in place to prevent users without administrative responsibilities from installing unauthorized software.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-16",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
"Id": "d3-pc-am-b-2",
"Name": "D3.PC.Am.B.2",
"Description": "Employee access to systems and confidential data provides for separation of duties.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-2",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
"Id": "d3-pc-am-b-3",
"Name": "D3.PC.Am.B.3",
"Description": "Elevated privileges (e.g., administrator privileges) are limited and tightly controlled (e.g., assigned to individuals, not shared, and require stronger password controls",
"Attributes": [
{
"ItemId": "d3-pc-am-b-3",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key"
]
},
{
"Id": "d3-pc-am-b-6",
"Name": "D3.PC.Am.B.6",
"Description": "Identification and authentication are required and managed for access to systems, applications, and hardware.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-6",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_rotate_access_key_90_days",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access",
"iam_disable_90_days_credentials"
]
},
{
"Id": "d3-pc-am-b-7",
"Name": "D3.PC.Am.B.7",
"Description": "Access controls include password complexity and limits to password attempts and reuse.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-7",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase"
]
},
{
"Id": "d3-pc-am-b-8",
"Name": "D3.PC.Am.B.8",
"Description": "All default passwords and unnecessary default accounts are changed before system implementation.",
"Attributes": [
{
"ItemId": "d3-pc-am-b-8",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"iam_no_root_access_key"
]
},
{
"Id": "d3-pc-im-b-1",
"Name": "D3.PC.Im.B.1",
"Description": "Network perimeter defense tools (e.g., border router and firewall) are used.",
"Attributes": [
{
"ItemId": "d3-pc-im-b-1",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"apigateway_waf_acl_attached",
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"elbv2_waf_acl_attached",
"emr_cluster_master_nodes_no_public_ip",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "d3-pc-im-b-2",
"Name": "D3.PC.Im.B.2",
"Description": "Systems that are accessed from the Internet or by external parties are protected by firewalls or other similar devices.",
"Attributes": [
{
"ItemId": "d3-pc-im-b-2",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_waf_acl_attached",
"elbv2_waf_acl_attached",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "d3-pc-im-b-3",
"Name": "D3.PC.Im.B.3",
"Description": "All ports are monitored.",
"Attributes": [
{
"ItemId": "d3-pc-im-b-3",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "d3-pc-im-b-5",
"Name": "D3.PC.Im.B.5",
"Description": "Systems configurations (for servers, desktops, routers, etc.) follow industry standards and are enforced",
"Attributes": [
{
"ItemId": "d3-pc-im-b-5",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching"
]
},
{
"Id": "d3-pc-im-b-6",
"Name": "D3.PC.Im.B.6",
"Description": "Ports, functions, protocols and services are prohibited if no longer needed for business purposes.",
"Attributes": [
{
"ItemId": "d3-pc-im-b-6",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "d3-pc-im-b-7",
"Name": "D3.PC.Im.B.7",
"Description": "Access to make changes to systems configurations (including virtual machines and hypervisors) is controlled and monitored.",
"Attributes": [
{
"ItemId": "d3-pc-im-b-7",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges"
]
},
{
"Id": "d3-pc-se-b-1",
"Name": "D3.PC.Se.B.1",
"Description": "Developers working for the institution follow secure program coding practices, as part of a system development life cycle (SDLC), that meet industry standards.",
"Attributes": [
{
"ItemId": "d3-pc-se-b1",
"Section": "Cybersecurity Controls (Domain 3)",
"SubSection": "Preventative Controls (PC)",
"Service": "aws"
}
],
"Checks": []
},
{
"Id": "d4-c-co-b-2",
"Name": "D4.C.Co.B.2",
"Description": "The institution ensures that third-party connections are authorized.",
"Attributes": [
{
"ItemId": "d4-c-co-b-2",
"Section": "External Dependency Management (Domain 4)",
"SubSection": "Connections (C)",
"Service": "aws"
}
],
"Checks": [
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "d5-dr-de-b-1",
"Name": "D5.DR.De.B.1",
"Description": "Alert parameters are set for detecting information security incidents that prompt mitigating actions.",
"Attributes": [
{
"ItemId": "d5-dr-de-b-1",
"Section": "Cyber Incident Management and Resilience (Domain 5)",
"SubSection": "Detection, Response, & Mitigation (DR)",
"Service": "aws"
}
],
"Checks": [
"cloudwatch_changes_to_network_acls_alarm_configured",
"cloudwatch_changes_to_network_gateways_alarm_configured",
"cloudwatch_changes_to_network_route_tables_alarm_configured",
"cloudwatch_changes_to_vpcs_alarm_configured",
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "d5-dr-de-b-2",
"Name": "D5.DR.De.B.2",
"Description": "System performance reports contain information that can be used as a risk indicator to detect information security incidents.",
"Attributes": [
{
"ItemId": "d5-dr-de-b-2",
"Section": "Cyber Incident Management and Resilience (Domain 5)",
"SubSection": "Detection, Response, & Mitigation (DR)",
"Service": "aws"
}
],
"Checks": []
},
{
"Id": "d5-dr-de-b-3",
"Name": "D5.DR.De.B.3",
"Description": "Tools and processes are in place to detect, alert, and trigger the incident response program.",
"Attributes": [
{
"ItemId": "d5-dr-de-b-3",
"Section": "Cyber Incident Management and Resilience (Domain 5)",
"SubSection": "Detection, Response, & Mitigation (DR)",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudwatch_changes_to_network_acls_alarm_configured",
"cloudwatch_changes_to_network_gateways_alarm_configured",
"cloudwatch_changes_to_network_route_tables_alarm_configured",
"cloudwatch_changes_to_vpcs_alarm_configured",
"elbv2_logging_enabled",
"elb_logging_enabled",
"guardduty_is_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled"
]
},
{
"Id": "d5-er-es-b-4",
"Name": "D5.ER.Es.B.4",
"Description": "Incidents are classified, logged and tracked.",
"Attributes": [
{
"ItemId": "d5-er-es-b-4",
"Section": "Cyber Incident Management and Resilience (Domain 5)",
"SubSection": "Escalation and Reporting (ER)",
"Service": "aws"
}
],
"Checks": [
"guardduty_no_high_severity_findings"
]
},
{
"Id": "d5-ir-pl-b-6",
"Name": "D5.IR.Pl.B.6",
"Description": "The institution plans to use business continuity, disaster recovery, and data backup programs to recover operations following an incident.",
"Attributes": [
{
"ItemId": "d5-ir-pl-b-6",
"Section": "Cyber Incident Management and Resilience (Domain 5)",
"SubSection": "Incident Resilience Planning & Strategy (IR)",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"elbv2_deletion_protection",
"rds_instance_enhanced_monitoring_enabled",
"rds_instance_backup_enabled",
"rds_instance_deletion_protection",
"rds_instance_multi_az",
"rds_instance_backup_enabled",
"s3_bucket_object_versioning"
]
}
]
}

View File

@@ -0,0 +1,126 @@
{
"Framework": "GDPR",
"Version": "",
"Provider": "AWS",
"Description": "The General Data Protection Regulation (GDPR) is a new European privacy law that became enforceable on May 25, 2018. The GDPR replaces the EU Data Protection Directive, also known as Directive 95/46/EC. It's intended to harmonize data protection laws throughout the European Union (EU). It does this by applying a single data protection law that's binding throughout each EU member state.",
"Requirements": [
{
"Id": "article_25",
"Name": "Article 25 Data protection by design and by default",
"Description": "To obtain the latest version of the official guide, please visit https://gdpr-info.eu/art-25-gdpr/. Taking into account the state of the art, the cost of implementation and the nature, scope, context and purposes of processing as well as the risks of varying likelihood and severity for rights and freedoms of natural persons posed by the processing, the controller shall, both at the time of the determination of the means for processing and at the time of the processing itself, implement appropriate technical and organisational measures, such as pseudonymisation, which are designed to implement data-protection principles, such as data minimisation, in an effective manner and to integrate the necessary safeguards into the processing in order to meet the requirements of this Regulation and protect the rights of data subjects. The controller shall implement appropriate technical and organisational measures for ensuring that, by default, only personal data which are necessary for each specific purpose of the processing are processed. That obligation applies to the amount of personal data collected, the extent of their processing, the period of their storage and their accessibility. In particular, such measures shall ensure that by default personal data are not made accessible without the individual's intervention to an indefinite number of natural persons. An approved certification mechanism pursuant to Article 42 may be used as an element to demonstrate compliance with the requirements set out in paragraphs 1 and 2 of this Article.",
"Attributes": [
{
"ItemId": "article_25",
"Section": "Article 25 Data protection by design and by default",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_logs_s3_bucket_is_not_publicly_accessible",
"cloudtrail_multi_region_enabled",
"cloudtrail_logs_s3_bucket_access_logging_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"config_recorder_all_regions_enabled",
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_password_policy_reuse_24",
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_support_role_created",
"iam_rotate_access_key_90_days",
"iam_user_mfa_enabled_console_access",
"iam_disable_90_days_credentials",
"kms_cmk_rotation_enabled",
"cloudwatch_log_metric_filter_for_s3_bucket_policy_changes",
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
"cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled",
"cloudwatch_log_metric_filter_authentication_failures",
"cloudwatch_log_metric_filter_sign_in_without_mfa",
"cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk",
"cloudwatch_log_metric_filter_policy_changes",
"cloudwatch_log_metric_filter_root_usage",
"cloudwatch_log_metric_filter_security_group_changes",
"cloudwatch_log_metric_filter_unauthorized_api_calls",
"vpc_flow_logs_enabled"
]
},
{
"Id": "article_30",
"Name": "Article 30 Records of processing activities",
"Description": " To obtain the latest version of the official guide, please visit https://www.privacy-regulation.eu/en/article-30-records-of-processing-activities-GDPR.htm. Each controller and, where applicable, the controller's representative, shall maintain a record of processing activities under its responsibility. That record shall contain all of the following information like the name and contact details of the controller and where applicable, the joint controller, the controller's representative and the data protection officer, the purposes of the processing etc. Each processor and where applicable, the processor's representative shall maintain a record of all categories of processing activities carried out on behalf of a controller, containing the name and contact details of the processor or processors and of each controller on behalf of which the processor is acting, and, where applicable of the controller's or the processor's representative, and the data protection officer, where applicable, transfers of personal data to a third country or an international organisation, including the identification of that third country or international organisation and, in the case of transfers referred to in the second subparagraph of Article 49(1), the documentation of suitable safeguards. The records referred to in paragraphs 1 and 2 shall be in writing, including in electronic form. The controller or the processor and, where applicable, the controller's or the processor's representative, shall make the record available to the supervisory authority on request. 
The obligations referred to in paragraphs 1 and 2 shall not apply to an enterprise or an organisation employing fewer than 250 persons unless the processing it carries out is likely to result in a risk to the rights and freedoms of data subjects, the processing is not occasional, or the processing includes special categories of data as referred to in Article 9(1) or personal data relating to criminal convictions and offences referred to in Article 10.",
"Attributes": [
{
"ItemId": "article_30",
"Section": "Article 30 Records of processing activities",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudtrail_kms_encryption_enabled",
"config_recorder_all_regions_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"kms_cmk_rotation_enabled",
"redshift_cluster_audit_logging",
"vpc_flow_logs_enabled"
]
},
{
"Id": "article_32",
"Name": "Article 32 Security of processing",
"Description": " To obtain the latest version of the official guide, please visit https://gdpr-info.eu/art-32-gdpr/. Taking into account the state of the art, the costs of implementation and the nature, scope, context and purposes of processing as well as the risk of varying likelihood and severity for the rights and freedoms of natural persons, the controller and the processor shall implement appropriate technical and organisational measures to ensure a level of security appropriate to the risk, including inter alia as appropriate. In assessing the appropriate level of security account shall be taken in particular of the risks that are presented by processing, in particular from accidental or unlawful destruction, loss, alteration, unauthorised disclosure of, or access to personal data transmitted, stored or otherwise processed. Adherence to an approved code of conduct as referred to in Article 40 or an approved certification mechanism as referred to in Article 42 may be used as an element by which to demonstrate compliance with the requirements set out in paragraph 1 of this Article. The controller and processor shall take steps to ensure that any natural person acting under the authority of the controller or the processor who has access to personal data does not process them except on instructions from the controller, unless he or she is required to do so by Union or Member State law.",
"Attributes": [
{
"ItemId": "article_32",
"Section": "Article 32 Security of processing",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"cloudfront_distributions_https_enabled",
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"dynamodb_accelerator_cluster_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_volume_encryption",
"efs_encryption_at_rest_enabled",
"elb_ssl_listeners",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_storage_encrypted",
"rds_instance_backup_enabled",
"rds_instance_integration_cloudwatch_logs",
"rds_instance_storage_encrypted",
"redshift_cluster_automated_snapshot",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
}
]
}

View File

@@ -0,0 +1,347 @@
{
"Framework": "GxP-21-CFR-Part-11",
"Version": "",
"Provider": "AWS",
"Description": "GxP refers to the regulations and guidelines that are applicable to life sciences organizations that make food and medical products. Medical products that fall under this include medicines, medical devices, and medical software applications. The overall intent of GxP requirements is to ensure that food and medical products are safe for consumers. It's also to ensure the integrity of data that's used to make product-related safety decisions.",
"Requirements": [
{
"Id": "11.10-a",
"Name": "11.10(a)",
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (a) Validation of systems to ensure accuracy, reliability, consistent intended performance, and the ability to discern invalid or altered records.",
"Attributes": [
{
"ItemId": "11.10-a",
"Section": "11.10 Controls for closed systems",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_log_file_validation_enabled",
"dynamodb_tables_pitr_enabled",
"ec2_instance_managed_by_ssm",
"ec2_instance_older_than_specific_days",
"elbv2_deletion_protection",
"rds_instance_backup_enabled",
"rds_instance_deletion_protection",
"rds_instance_backup_enabled",
"rds_instance_multi_az",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning",
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching"
]
},
{
"Id": "11.10-c",
"Name": "11.10(c)",
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (c) Protection of records to enable their accurate and ready retrieval throughout the records retention period.",
"Attributes": [
{
"ItemId": "11.10-c",
"Section": "11.10 Controls for closed systems",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"cloudwatch_log_group_retention_policy_specific_days_enabled",
"rds_instance_storage_encrypted",
"rds_instance_storage_encrypted",
"rds_snapshots_public_access",
"redshift_cluster_audit_logging",
"redshift_cluster_public_access",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_bucket_object_versioning",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"sagemaker_notebook_instance_encryption_enabled"
]
},
{
"Id": "11.10-d",
"Name": "11.10(d)",
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (d) Limiting system access to authorized individuals.",
"Attributes": [
{
"ItemId": "11.10-d",
"Section": "11.10 Controls for closed systems",
"Service": "aws"
}
],
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_profile_attached",
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"secretsmanager_automatic_rotation_enabled",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "11.10-e",
"Name": "11.10(e)",
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (e) Use of secure, computer-generated, time-stamped audit trails to independently record the date and time of operator entries and actions that create, modify, or delete electronic records. Record changes shall not obscure previously recorded information. Such audit trail documentation shall be retained for a period at least as long as that required for the subject electronic records and shall be available for agency review and copying.",
"Attributes": [
{
"ItemId": "11.10-d",
"Section": "11.10 Controls for closed systems",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudwatch_log_group_retention_policy_specific_days_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "11.10-g",
"Name": "11.10(g)",
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (g) Use of authority checks to ensure that only authorized individuals can use the system, electronically sign a record, access the operation or computer system input or output device, alter a record, or perform the operation at hand.",
"Attributes": [
{
"ItemId": "11.10-g",
"Section": "11.10 Controls for closed systems",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_public_snapshot",
"ec2_ebs_default_encryption",
"ec2_instance_profile_attached",
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"efs_encryption_at_rest_enabled",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"secretsmanager_automatic_rotation_enabled",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "11.10-h",
"Name": "11.10(h)",
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (h) Use of device (e.g., terminal) checks to determine, as appropriate, the validity of the source of data input or operational instruction.",
"Attributes": [
{
"ItemId": "11.10-h",
"Section": "11.10 Controls for closed systems",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching"
]
},
{
"Id": "11.10-k",
"Name": "11.10(k)",
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (k) Use of appropriate controls over systems documentation including: (1) Adequate controls over the distribution of, access to, and use of documentation for system operation and maintenance. (2) Revision and change control procedures to maintain an audit trail that documents time-sequenced development and modification of systems documentation.",
"Attributes": [
{
"ItemId": "11.10-k",
"Section": "11.10 Controls for closed systems",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"ec2_ebs_public_snapshot",
"emr_cluster_master_nodes_no_public_ip",
"rds_instance_integration_cloudwatch_logs",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_server_access_logging_enabled",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "11.30",
"Name": "11.30 Controls for open systems",
"Description": "Persons who use open systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, as appropriate, the confidentiality of electronic records from the point of their creation to the point of their receipt. Such procedures and controls shall include those identified in 11.10, as appropriate, and additional measures such as document encryption and use of appropriate digital signature standards to ensure, as necessary under the circumstances, record authenticity, integrity, and confidentiality.",
"Attributes": [
{
"ItemId": "11.30",
"Section": "11.30 Controls for open systems",
"Service": "aws"
}
],
"Checks": [
"apigateway_client_certificate_enabled",
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"efs_encryption_at_rest_enabled",
"elbv2_insecure_ssl_ciphers",
"elb_ssl_listeners",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"kms_cmk_rotation_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_storage_encrypted",
"rds_instance_storage_encrypted",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
},
{
"Id": "11.200",
"Name": "11.200 Electronic signature components and controls",
"Description": "(a) Electronic signatures that are not based upon biometrics shall: (1) Employ at least two distinct identification components such as an identification code and password. (i) When an individual executes a series of signings during a single, continuous period of controlled system access, the first signing shall be executed using all electronic signature components; subsequent signings shall be executed using at least one electronic signature component that is only executable by, and designed to be used only by, the individual. (ii) When an individual executes one or more signings not performed during a single, continuous period of controlled system access, each signing shall be executed using all of the electronic signature components. (2) Be used only by their genuine owners; and (3) Be administered and executed to ensure that attempted use of an individual's electronic signature by anyone other than its genuine owner requires collaboration of two or more individuals.",
"Attributes": [
{
"ItemId": "11.200",
"Section": "11.200 Electronic signature components and controls",
"Service": "aws"
}
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access"
]
},
{
"Id": "11.300-b",
"Name": "11.300(b)",
"Description": "Persons who use electronic signatures based upon use of identification codes in combination with passwords shall employ controls to ensure their security and integrity. Such controls shall include: (b) Ensuring that identification code and password issuances are periodically checked, recalled, or revised (e.g., to cover such events as password aging).",
"Attributes": [
{
"ItemId": "11.300-b",
"Section": "11.300 Controls for identification codes/passwords",
"Service": "aws"
}
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials",
"secretsmanager_automatic_rotation_enabled"
]
},
{
"Id": "11.300-d",
"Name": "11.300(d)",
"Description": "Persons who use electronic signatures based upon use of identification codes in combination with passwords shall employ controls to ensure their security and integrity. Such controls shall include: (d) Use of transaction safeguards to prevent unauthorized use of passwords and/or identification codes, and to detect and report in an immediate and urgent manner any attempts at their unauthorized use to the system security unit, and, as appropriate, to organizational management.",
"Attributes": [
{
"ItemId": "11.300-d",
"Section": "11.300 Controls for identification codes/passwords",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"securityhub_enabled"
]
}
]
}

View File

@@ -0,0 +1,281 @@
{
"Framework": "GxP-EU-Annex-11",
"Version": "",
"Provider": "AWS",
"Description": "The GxP EU Annex 11 framework is the European equivalent to the FDA 21 CFR part 11 framework in the United States. This annex applies to all forms of computerized systems that are used as part of Good Manufacturing Practices (GMP) regulated activities. A computerized system is a set of software and hardware components that together fulfill certain functionalities. The application should be validated and IT infrastructure should be qualified. Where a computerized system replaces a manual operation, there should be no resultant decrease in product quality, process control, or quality assurance. There should be no increase in the overall risk of the process.",
"Requirements": [
{
"Id": "1-risk-management",
"Name": "1 Risk Management",
"Description": "Risk management should be applied throughout the lifecycle of the computerised system taking into account patient safety, data integrity and product quality. As part of a risk management system, decisions on the extent of validation and data integrity controls should be based on a justified and documented risk assessment of the computerised system.",
"Attributes": [
{
"ItemId": "1-risk-management",
"Section": "General",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"securityhub_enabled"
]
},
{
"Id": "5-data",
"Name": "5 Data",
"Description": "Computerised systems exchanging data electronically with other systems should include appropriate built-in checks for the correct and secure entry and processing of data, in order to minimize the risks.",
"Attributes": [
{
"ItemId": "5-data",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "7.1-data-storage-damage-protection",
"Name": "7.1 Data Storage - Damage Protection",
"Description": "Data should be secured by both physical and electronic means against damage. Stored data should be checked for accessibility, readability and accuracy. Access to data should be ensured throughout the retention period.",
"Attributes": [
{
"ItemId": "7.1-data-storage-damage-protection",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"dynamodb_accelerator_cluster_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"dynamodb_tables_pitr_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"efs_encryption_at_rest_enabled",
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
"opensearch_service_domains_encryption_at_rest_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_backup_enabled",
"rds_instance_storage_encrypted",
"rds_instance_backup_enabled",
"rds_instance_storage_encrypted",
"redshift_cluster_automated_snapshot",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"s3_bucket_default_encryption",
"s3_bucket_object_versioning",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
},
{
"Id": "7.2-data-storage-backups",
"Name": "7.2 Data Storage - Backups",
"Description": "Regular back-ups of all relevant data should be done. Integrity and accuracy of backup data and the ability to restore the data should be checked during validation and monitored periodically.",
"Attributes": [
{
"ItemId": "7.2-data-storage-backups",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"rds_instance_backup_enabled",
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "8.2-printouts-data-changes",
"Name": "8.2 Printouts - Data Changes",
"Description": "For records supporting batch release it should be possible to generate printouts indicating if any of the data has been changed since the original entry.",
"Attributes": [
{
"ItemId": "8.2-printouts-data-changes",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled"
]
},
{
"Id": "9-audit-trails",
"Name": "9 Audit Trails",
"Description": "Consideration should be given, based on a risk assessment, to building into the system the creation of a record of all GMP-relevant changes and deletions (a system generated 'audit trail'). For change or deletion of GMP-relevant data the reason should be documented. Audit trails need to be available and convertible to a generally intelligible form and regularly reviewed.",
"Attributes": [
{
"ItemId": "9-audit-trails",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled"
]
},
{
"Id": "10-change-and-configuration-management",
"Name": "10 Change and Configuration Management",
"Description": "Any changes to a computerised system including system configurations should only be made in a controlled manner in accordance with a defined procedure.",
"Attributes": [
{
"ItemId": "10-change-and-configuration-management",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "12.4-security-audit-trail",
"Name": "12.4 Security - Audit Trail",
"Description": "Management systems for data and for documents should be designed to record the identity of operators entering, changing, confirming or deleting data including date and time.",
"Attributes": [
{
"ItemId": "12.4-security-audit-trail",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled"
]
},
{
"Id": "16-business-continuity",
"Name": "16 Business Continuity",
"Description": "For the availability of computerised systems supporting critical processes, provisions should be made to ensure continuity of support for those processes in the event of a system breakdown (e.g. a manual or alternative system). The time required to bring the alternative arrangements into use should be based on risk and appropriate for a particular system and the business process it supports. These arrangements should be adequately documented and tested.",
"Attributes": [
{
"ItemId": "16-business-continuity",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "17-archiving",
"Name": "17 Archiving",
"Description": "Data may be archived. This data should be checked for accessibility, readability and integrity. If relevant changes are to be made to the system (e.g. computer equipment or programs), then the ability to retrieve the data should be ensured and tested.",
"Attributes": [
{
"ItemId": "17-archiving",
"Section": "Operational Phase",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "4.2-validation-documentation-change-control",
"Name": "4.2 Validation - Documentation Change Control",
"Description": "Validation documentation should include change control records (if applicable) and reports on any deviations observed during the validation process.",
"Attributes": [
{
"ItemId": "4.2-validation-documentation-change-control",
"Section": "Project Phase",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled"
]
},
{
"Id": "4.5-validation-development-quality",
"Name": "4.5 Validation - Development Quality",
"Description": "The regulated user should take all reasonable steps, to ensure that the system has been developed in accordance with an appropriate quality management system. The supplier should be assessed appropriately.",
"Attributes": [
{
"ItemId": "4.5-validation-development-quality",
"Section": "Project Phase",
"Service": "aws"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "4.6-validation-quality-performance",
"Name": "4.6 Validation - Quality and Performance",
"Description": "For the validation of bespoke or customised computerised systems there should be a process in place that ensures the formal assessment and reporting of quality and performance measures for all the life-cycle stages of the system.",
"Attributes": [
{
"ItemId": "4.6-validation-quality-performance",
"Section": "Project Phase",
"Service": "aws"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "4.8-validation-data-transfer",
"Name": "4.8 Validation - Data Transfer",
"Description": "If data are transferred to another data format or system, validation should include checks that data are not altered in value and/or meaning during this migration process.",
"Attributes": [
{
"ItemId": "4.8-validation-data-transfer",
"Section": "Project Phase",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
}
]
}

View File

@@ -0,0 +1,773 @@
{
"Framework": "HIPAA",
"Version": "",
"Provider": "AWS",
"Description": "The Health Insurance Portability and Accountability Act of 1996 (HIPAA) is legislation that helps US workers to retain health insurance coverage when they change or lose jobs. The legislation also seeks to encourage electronic health records to improve the efficiency and quality of the US healthcare system through improved information sharing.",
"Requirements": [
{
"Id": "164_308_a_1_ii_a",
"Name": "164.308(a)(1)(ii)(A) Risk analysis",
"Description": "Conduct an accurate and thorough assessment of the potential risks and vulnerabilities to the confidentiality, integrity, and availability of electronic protected health information held by the covered entity or business associate.",
"Attributes": [
{
"ItemId": "164_308_a_1_ii_a",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"config_recorder_all_regions_enabled",
"guardduty_is_enabled"
]
},
{
"Id": "164_308_a_1_ii_b",
"Name": "164.308(a)(1)(ii)(B) Risk Management",
"Description": "Implement security measures sufficient to reduce risks and vulnerabilities to a reasonable and appropriate level to comply with 164.306(a): Ensure the confidentiality, integrity, and availability of all electronic protected health information the covered entity or business associate creates, receives, maintains, or transmits.",
"Attributes": [
{
"ItemId": "164_308_a_1_ii_b",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"dynamodb_tables_pitr_enabled",
"ec2_ebs_public_snapshot",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"ec2_instance_public_ip",
"ec2_instance_older_than_specific_days",
"efs_encryption_at_rest_enabled",
"elbv2_deletion_protection",
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_backup_enabled",
"rds_instance_storage_encrypted",
"rds_instance_multi_az",
"rds_instance_storage_encrypted",
"rds_snapshots_public_access",
"redshift_cluster_audit_logging",
"redshift_cluster_public_access",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_bucket_object_versioning",
"s3_account_level_public_access_blocks",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "164_308_a_1_ii_d",
"Name": "164.308(a)(1)(ii)(D) Information system activity review",
"Description": "Implement procedures to regularly review records of information system activity, such as audit logs, access reports, and security incident tracking reports.",
"Attributes": [
{
"ItemId": "164_308_a_1_ii_d",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"guardduty_is_enabled",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "164_308_a_3_i",
"Name": "164.308(a)(3)(i) Workforce security",
"Description": "Implement policies and procedures to ensure that all members of its workforce have appropriate access to electronic protected health information, as provided under paragraph (a)(4) of this section, and to prevent those workforce members who do not have access under paragraph (a)(4) of this section from obtaining access to electronic protected health information.",
"Attributes": [
{
"ItemId": "164_308_a_3_i",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_account_level_public_access_blocks",
"sagemaker_notebook_instance_without_direct_internet_access_configured"
]
},
{
"Id": "164_308_a_3_ii_a",
"Name": "164.308(a)(3)(ii)(A) Authorization and/or supervision",
"Description": "Implement procedures for the authorization and/or supervision of workforce members who work with electronic protected health information or in locations where it might be accessed.",
"Attributes": [
{
"ItemId": "164_308_a_3_ii_a",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"guardduty_is_enabled",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "164_308_a_3_ii_b",
"Name": "164.308(a)(3)(ii)(B) Workforce clearance procedure",
"Description": "Implement procedures to determine that the access of a workforce member to electronic protected health information is appropriate.",
"Attributes": [
{
"ItemId": "164_308_a_3_ii_b",
"Section": "164.308 Administrative Safeguards",
"Service": "iam"
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
},
{
"Id": "164_308_a_3_ii_c",
"Name": "164.308(a)(3)(ii)(C) Termination procedures",
"Description": "Implement procedures for terminating access to electronic protected health information when the employment of, or other arrangement with, a workforce member ends or as required by determinations made as specified in paragraph (a)(3)(ii)(b).",
"Attributes": [
{
"ItemId": "164_308_a_3_ii_c",
"Section": "164.308 Administrative Safeguards",
"Service": "iam"
}
],
"Checks": [
"iam_rotate_access_key_90_days"
]
},
{
"Id": "164_308_a_4_i",
"Name": "164.308(a)(4)(i) Information access management",
"Description": "Implement policies and procedures for authorizing access to electronic protected health information that are consistent with the applicable requirements of subpart E of this part.",
"Attributes": [
{
"ItemId": "164_308_a_4_i",
"Section": "164.308 Administrative Safeguards",
"Service": "iam"
}
],
"Checks": [
"iam_policy_no_administrative_privileges"
]
},
{
"Id": "164_308_a_4_ii_a",
"Name": "164.308(a)(4)(ii)(A) Isolating health care clearinghouse functions",
"Description": "If a health care clearinghouse is part of a larger organization, the clearinghouse must implement policies and procedures that protect the electronic protected health information of the clearinghouse from unauthorized access by the larger organization.",
"Attributes": [
{
"ItemId": "164_308_a_4_ii_a",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"cloudfront_distributions_https_enabled",
"cloudtrail_kms_encryption_enabled",
"dynamodb_accelerator_cluster_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"efs_encryption_at_rest_enabled",
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
"elb_ssl_listeners",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_storage_encrypted",
"rds_instance_backup_enabled",
"rds_instance_integration_cloudwatch_logs",
"rds_instance_storage_encrypted",
"redshift_cluster_automated_snapshot",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"s3_bucket_default_encryption",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
},
{
"Id": "164_308_a_4_ii_b",
"Name": "164.308(a)(4)(ii)(B) Access authorization",
"Description": "Implement policies and procedures for granting access to electronic protected health information, As one illustrative example, through access to a workstation, transaction, program, process, or other mechanism.",
"Attributes": [
{
"ItemId": "164_308_a_4_ii_b",
"Section": "164.308 Administrative Safeguards",
"Service": "iam"
}
],
"Checks": [
"iam_policy_no_administrative_privileges"
]
},
{
"Id": "164_308_a_4_ii_c",
"Name": "164.308(a)(4)(ii)(B) Access authorization",
"Description": "Implement policies and procedures that, based upon the covered entity's or the business associate's access authorization policies, establish, document, review, and modify a user's right of access to a workstation, transaction, program, or process.",
"Attributes": [
{
"ItemId": "164_308_a_4_ii_c",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"iam_password_policy_reuse_24",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials",
"secretsmanager_automatic_rotation_enabled"
]
},
{
"Id": "164_308_a_5_ii_b",
"Name": "164.308(a)(5)(ii)(B) Protection from malicious software",
"Description": "Procedures for guarding against, detecting, and reporting malicious software.",
"Attributes": [
{
"ItemId": "164_308_a_5_ii_b",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching"
]
},
{
"Id": "164_308_a_5_ii_c",
"Name": "164.308(a)(5)(ii)(C) Log-in monitoring",
"Description": "Procedures for monitoring log-in attempts and reporting discrepancies.",
"Attributes": [
{
"ItemId": "164_308_a_5_ii_c",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"cloudwatch_log_metric_filter_authentication_failures",
"securityhub_enabled"
]
},
{
"Id": "164_308_a_5_ii_d",
"Name": "164.308(a)(5)(ii)(D) Password management",
"Description": "Procedures for creating, changing, and safeguarding passwords.",
"Attributes": [
{
"ItemId": "164_308_a_5_ii_d",
"Section": "164.308 Administrative Safeguards",
"Service": "iam"
}
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_password_policy_reuse_24",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials"
]
},
{
"Id": "164_308_a_6_i",
"Name": "164.308(a)(6)(i) Security incident procedures",
"Description": "Implement policies and procedures to address security incidents.",
"Attributes": [
{
"ItemId": "164_308_a_6_i",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudwatch_changes_to_network_acls_alarm_configured",
"cloudwatch_changes_to_network_gateways_alarm_configured",
"cloudwatch_changes_to_network_route_tables_alarm_configured",
"cloudwatch_changes_to_vpcs_alarm_configured",
"guardduty_is_enabled",
"cloudwatch_log_metric_filter_authentication_failures",
"cloudwatch_log_metric_filter_root_usage",
"securityhub_enabled"
]
},
{
"Id": "164_308_a_6_ii",
"Name": "164.308(a)(6)(ii) Response and reporting",
"Description": "Identify and respond to suspected or known security incidents; mitigate, to the extent practicable, harmful effects of security incidents that are known to the covered entity or business associate; and document security incidents and their outcomes.",
"Attributes": [
{
"ItemId": "164_308_a_6_ii",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"guardduty_is_enabled",
"guardduty_no_high_severity_findings",
"cloudwatch_log_metric_filter_authentication_failures",
"cloudwatch_log_metric_filter_root_usage",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "164_308_a_7_i",
"Name": "164.308(a)(7)(i) Contingency plan",
"Description": "Establish (and implement as needed) policies and procedures for responding to an emergency or other occurrence (for example, fire, vandalism, system failure, and natural disaster) that damages systems that contain electronic protected health information.",
"Attributes": [
{
"ItemId": "164_308_a_7_i",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_multi_az",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "164_308_a_7_ii_a",
"Name": "164.308(a)(7)(ii)(A) Data backup plan",
"Description": "Establish and implement procedures to create and maintain retrievable exact copies of electronic protected health information.",
"Attributes": [
{
"ItemId": "164_308_a_7_ii_a",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_multi_az",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "164_308_a_7_ii_b",
"Name": "164.308(a)(7)(ii)(B) Disaster recovery plan",
"Description": "Establish (and implement as needed) procedures to restore any loss of data.",
"Attributes": [
{
"ItemId": "164_308_a_7_ii_b",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_multi_az",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "164_308_a_7_ii_c",
"Name": "164.308(a)(7)(ii)(C) Emergency mode operation plan",
"Description": "Establish (and implement as needed) procedures to enable continuation of critical business processes for protection of the security of electronic protected health information while operating in emergency mode.",
"Attributes": [
{
"ItemId": "164_308_a_7_ii_c",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_multi_az",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "164_308_a_8",
"Name": "164.308(a)(8) Evaluation",
"Description": "Perform a periodic technical and nontechnical evaluation, based initially upon the standards implemented under this rule and subsequently, in response to environmental or operational changes affecting the security of electronic protected health information, that establishes the extent to which an entity's security policies and procedures meet the requirements of this subpart.",
"Attributes": [
{
"ItemId": "164_308_a_8",
"Section": "164.308 Administrative Safeguards",
"Service": "aws"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "164_312_a_1",
"Name": "164.312(a)(1) Access control",
"Description": "Implement technical policies and procedures for electronic information systems that maintain electronic protected health information to allow access only to those persons or software programs that have been granted access rights as specified in 164.308(a)(4).",
"Attributes": [
{
"ItemId": "164_312_a_1",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_user_mfa_enabled_console_access",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"rds_instance_no_public_access",
"rds_snapshots_public_access",
"redshift_cluster_public_access",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured"
]
},
{
"Id": "164_312_a_2_i",
"Name": "164.312(a)(2)(i) Unique user identification",
"Description": "Assign a unique name and/or number for identifying and tracking user identity.",
"Attributes": [
{
"ItemId": "164_312_a_2_i",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"iam_no_root_access_key",
"s3_bucket_public_access"
]
},
{
"Id": "164_312_a_2_ii",
"Name": "164.312(a)(2)(ii) Emergency access procedure",
"Description": "Establish (and implement as needed) procedures for obtaining necessary electronic protected health information during an emergency.",
"Attributes": [
{
"ItemId": "164_312_a_2_ii",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "164_312_a_2_iv",
"Name": "164.312(a)(2)(iv) Encryption and decryption",
"Description": "Implement a mechanism to encrypt and decrypt electronic protected health information.",
"Attributes": [
{
"ItemId": "164_312_a_2_iv",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"dynamodb_accelerator_cluster_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"efs_encryption_at_rest_enabled",
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
"opensearch_service_domains_encryption_at_rest_enabled",
"kms_cmk_rotation_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_storage_encrypted",
"rds_instance_storage_encrypted",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
},
{
"Id": "164_312_b",
"Name": "164.312(b) Audit controls",
"Description": "Implement hardware, software, and/or procedural mechanisms that record and examine activity in information systems that contain or use electronic protected health information.",
"Attributes": [
{
"ItemId": "164_312_b",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudtrail_log_file_validation_enabled",
"cloudwatch_log_group_retention_policy_specific_days_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"guardduty_is_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "164_312_c_1",
"Name": "164.312(c)(1) Integrity",
"Description": "Implement policies and procedures to protect electronic protected health information from improper alteration or destruction.",
"Attributes": [
{
"ItemId": "164_312_c_1",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"ec2_ebs_volume_encryption",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_object_versioning"
]
},
{
"Id": "164_312_c_2",
"Name": "164.312(c)(2) Mechanism to authenticate electronic protected health information",
"Description": "Implement electronic mechanisms to corroborate that electronic protected health information has not been altered or destroyed in an unauthorized manner.",
"Attributes": [
{
"ItemId": "164_312_c_2",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"ec2_ebs_volume_encryption",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_object_versioning",
"vpc_flow_logs_enabled"
]
},
{
"Id": "164_312_d",
"Name": "164.312(d) Person or entity authentication",
"Description": "Implement procedures to verify that a person or entity seeking access to electronic protected health information is the one claimed.",
"Attributes": [
{
"ItemId": "164_312_d",
"Section": "164.312 Technical Safeguards",
"Service": "iam"
}
],
"Checks": [
"iam_password_policy_reuse_24",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access",
"iam_user_mfa_enabled_console_access"
]
},
{
"Id": "164_312_e_1",
"Name": "164.312(e)(1) Transmission security",
"Description": "Implement technical security measures to guard against unauthorized access to electronic protected health information that is being transmitted over an electronic communications network.",
"Attributes": [
{
"ItemId": "164_312_e_1",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"cloudfront_distributions_https_enabled",
"elb_ssl_listeners",
"opensearch_service_domains_node_to_node_encryption_enabled",
"awslambda_function_not_publicly_accessible",
"s3_bucket_secure_transport_policy",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "164_312_e_2_i",
"Name": "164.312(e)(2)(i) Integrity controls",
"Description": "Implement security measures to ensure that electronically transmitted electronic protected health information is not improperly modified without detection until disposed of.",
"Attributes": [
{
"ItemId": "164_312_e_2_i",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"elb_ssl_listeners",
"guardduty_is_enabled",
"s3_bucket_secure_transport_policy",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled"
]
},
{
"Id": "164_312_e_2_ii",
"Name": "164.312(e)(2)(ii) Encryption",
"Description": "Implement a mechanism to encrypt electronic protected health information whenever deemed appropriate.",
"Attributes": [
{
"ItemId": "164_312_e_2_ii",
"Section": "164.312 Technical Safeguards",
"Service": "aws"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"dynamodb_accelerator_cluster_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_default_encryption",
"efs_encryption_at_rest_enabled",
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
"elb_ssl_listeners",
"opensearch_service_domains_encryption_at_rest_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_storage_encrypted",
"rds_instance_storage_encrypted",
"redshift_cluster_audit_logging",
"s3_bucket_default_encryption",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled"
]
}
]
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,293 @@
{
"Framework": "PCI",
"Version": "3.2.1",
"Provider": "AWS",
"Description": "The Payment Card Industry Data Security Standard (PCI DSS) is a proprietary information security standard. It's administered by the PCI Security Standards Council, which was founded by American Express, Discover Financial Services, JCB International, MasterCard Worldwide, and Visa Inc. PCI DSS applies to entities that store, process, or transmit cardholder data (CHD) or sensitive authentication data (SAD). This includes, but isn't limited to, merchants, processors, acquirers, issuers, and service providers. The PCI DSS is mandated by the card brands and administered by the Payment Card Industry Security Standards Council.",
"Requirements": [
{
"Id": "autoscaling",
"Name": "Auto Scaling",
"Description": "This control checks whether your Auto Scaling groups that are associated with a load balancer are using Elastic Load Balancing health checks. PCI DSS does not require load balancing or highly available configurations. However, this check aligns with AWS best practices.",
"Attributes": [
{
"ItemId": "autoscaling",
"Service": "autoscaling"
}
],
"Checks": []
},
{
"Id": "cloudtrail",
"Name": "CloudTrail",
"Description": "This section contains recommendations for configuring CloudTrail resources and options.",
"Attributes": [
{
"ItemId": "cloudtrail",
"Service": "cloudtrail"
}
],
"Checks": [
"cloudtrail_kms_encryption_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_log_file_validation_enabled",
"cloudtrail_cloudwatch_logging_enabled"
]
},
{
"Id": "codebuild",
"Name": "CodeBuild",
"Description": "This section contains recommendations for configuring CodeBuild resources and options.",
"Attributes": [
{
"ItemId": "codebuild",
"Service": "codebuild"
}
],
"Checks": []
},
{
"Id": "config",
"Name": "Config",
"Description": "This section contains recommendations for configuring AWS Config.",
"Attributes": [
{
"ItemId": "config",
"Service": "config"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "cw",
"Name": "CloudWatch",
"Description": "This section contains recommendations for configuring CloudWatch resources and options.",
"Attributes": [
{
"ItemId": "cw",
"Service": "cloudwatch"
}
],
"Checks": [
"cloudwatch_log_metric_filter_root_usage"
]
},
{
"Id": "dms",
"Name": "DMS",
"Description": "This section contains recommendations for configuring AWS DMS resources and options.",
"Attributes": [
{
"ItemId": "dms",
"Service": "dms"
}
],
"Checks": []
},
{
"Id": "ec2",
"Name": "EC2",
"Description": "This section contains recommendations for configuring EC2 resources and options.",
"Attributes": [
{
"ItemId": "ec2",
"Service": "ec2"
}
],
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_securitygroup_default_restrict_traffic",
"ec2_elastic_ip_unassgined",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"vpc_flow_logs_enabled"
]
},
{
"Id": "elbv2",
"Name": "ELBV2",
"Description": "This section contains recommendations for configuring Elastic Load Balancer resources and options.",
"Attributes": [
{
"ItemId": "elbv2",
"Service": "elbv2"
}
],
"Checks": []
},
{
"Id": "elasticsearch",
"Name": "Elasticsearch",
"Description": "This section contains recommendations for configuring Elasticsearch resources and options.",
"Attributes": [
{
"ItemId": "elasticsearch",
"Service": "elasticsearch"
}
],
"Checks": [
"opensearch_service_domains_encryption_at_rest_enabled"
]
},
{
"Id": "guardduty",
"Name": "GuardDuty",
"Description": "This section contains recommendations for configuring AWS GuardDuty resources and options.",
"Attributes": [
{
"ItemId": "guardduty",
"Service": "guardduty"
}
],
"Checks": [
"guardduty_is_enabled"
]
},
{
"Id": "iam",
"Name": "IAM",
"Description": "This section contains recommendations for configuring AWS IAM resources and options.",
"Attributes": [
{
"ItemId": "iam",
"Service": "iam"
}
],
"Checks": [
"iam_no_root_access_key",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access",
"iam_disable_90_days_credentials",
"iam_password_policy_minimum_length_14",
"iam_password_policy_lowercase",
"iam_password_policy_number",
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase"
]
},
{
"Id": "kms",
"Name": "KMS",
"Description": "This section contains recommendations for configuring AWS KMS resources and options.",
"Attributes": [
{
"ItemId": "kms",
"Service": "kms"
}
],
"Checks": [
"kms_cmk_rotation_enabled"
]
},
{
"Id": "lambda",
"Name": "Lambda",
"Description": "This section contains recommendations for configuring Lambda resources and options.",
"Attributes": [
{
"ItemId": "lambda",
"Service": "lambda"
}
],
"Checks": [
"awslambda_function_url_public",
"awslambda_function_not_publicly_accessible"
]
},
{
"Id": "opensearch",
"Name": "OpenSearch",
"Description": "This section contains recommendations for configuring OpenSearch resources and options.",
"Attributes": [
{
"ItemId": "opensearch",
"Service": "opensearch"
}
],
"Checks": [
"opensearch_service_domains_encryption_at_rest_enabled"
]
},
{
"Id": "rds",
"Name": "RDS",
"Description": "This section contains recommendations for configuring AWS RDS resources and options.",
"Attributes": [
{
"ItemId": "rds",
"Service": "rds"
}
],
"Checks": [
"rds_snapshots_public_access",
"rds_instance_no_public_access"
]
},
{
"Id": "redshift",
"Name": "Redshift",
"Description": "This section contains recommendations for configuring AWS Redshift resources and options.",
"Attributes": [
{
"ItemId": "redshift",
"Service": "redshift"
}
],
"Checks": [
"redshift_cluster_public_access"
]
},
{
"Id": "s3",
"Name": "S3",
"Description": "This section contains recommendations for configuring AWS S3 resources and options.",
"Attributes": [
{
"ItemId": "s3",
"Service": "s3"
}
],
"Checks": [
"s3_bucket_policy_public_write_access",
"s3_bucket_public_access",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_public_access"
]
},
{
"Id": "sagemaker",
"Name": "SageMaker",
"Description": "This section contains recommendations for configuring AWS Sagemaker resources and options.",
"Attributes": [
{
"ItemId": "sagemaker",
"Service": "sagemaker"
}
],
"Checks": [
"sagemaker_notebook_instance_without_direct_internet_access_configured"
]
},
{
"Id": "ssm",
"Name": "SSM",
"Description": "This section contains recommendations for configuring AWS SSM resources and options.",
"Attributes": [
{
"ItemId": "ssm",
"Service": "ssm"
}
],
"Checks": [
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching",
"ec2_instance_managed_by_ssm"
]
}
]
}

View File

@@ -0,0 +1,198 @@
{
"Framework": "RBI-Cyber-Security-Framework",
"Version": "",
"Provider": "AWS",
"Description": "The Reserve Bank had prescribed a set of baseline cyber security controls for primary (Urban) cooperative banks (UCBs) in October 2018. On further examination, it has been decided to prescribe a comprehensive cyber security framework for the UCBs, as a graded approach, based on their digital depth and interconnectedness with the payment systems landscape, digital products offered by them and assessment of cyber security risk. The framework would mandate implementation of progressively stronger security measures based on the nature, variety and scale of digital product offerings of banks.",
"Requirements": [
{
"Id": "annex_i_1_1",
"Name": "Annex I (1.1)",
"Description": "UCBs should maintain an up-to-date business IT Asset Inventory Register containing the following fields, as a minimum: a) Details of the IT Asset (viz., hardware/software/network devices, key personnel, services, etc.), b. Details of systems where customer data are stored, c. Associated business applications, if any, d. Criticality of the IT asset (For example, High/Medium/Low).",
"Attributes": [
{
"ItemId": "annex_i_1_1",
"Service": "ec2"
}
],
"Checks": [
"ec2_instance_managed_by_ssm"
]
},
{
"Id": "annex_i_1_3",
"Name": "Annex I (1.3)",
"Description": "Appropriately manage and provide protection within and outside UCB/network, keeping in mind how the data/information is stored, transmitted, processed, accessed and put to use within/outside the UCBs network, and level of risk they are exposed to depending on the sensitivity of the data/information.",
"Attributes": [
{
"ItemId": "annex_i_1_3",
"Service": "aws"
}
],
"Checks": [
"acm_certificates_expiration_check",
"apigateway_client_certificate_enabled",
"cloudtrail_kms_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
"ec2_ebs_volume_encryption",
"ec2_ebs_public_snapshot",
"ec2_ebs_volume_encryption",
"ec2_instance_public_ip",
"efs_encryption_at_rest_enabled",
"elbv2_insecure_ssl_ciphers",
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"opensearch_service_domains_node_to_node_encryption_enabled",
"kms_cmk_rotation_enabled",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
"cloudwatch_log_group_kms_encryption_enabled",
"rds_instance_storage_encrypted",
"rds_instance_no_public_access",
"rds_instance_storage_encrypted",
"rds_snapshots_public_access",
"redshift_cluster_audit_logging",
"redshift_cluster_public_access",
"s3_bucket_default_encryption",
"s3_bucket_default_encryption",
"s3_bucket_secure_transport_policy",
"s3_bucket_public_access",
"s3_bucket_policy_public_write_access",
"s3_bucket_public_access",
"sagemaker_notebook_instance_without_direct_internet_access_configured",
"sagemaker_notebook_instance_encryption_enabled",
"sns_topics_kms_encryption_at_rest_enabled",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "annex_i_5_1",
"Name": "Annex I (5.1)",
"Description": "The firewall configurations should be set to the highest security level and evaluation of critical device (such as firewall, network switches, security devices, etc.) configurations should be done periodically.",
"Attributes": [
{
"ItemId": "annex_i_5_1",
"Service": "aws"
}
],
"Checks": [
"apigateway_waf_acl_attached",
"elbv2_waf_acl_attached",
"ec2_securitygroup_default_restrict_traffic",
"ec2_networkacl_allow_ingress_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_networkacl_allow_ingress_any_port"
]
},
{
"Id": "annex_i_6",
"Name": "Annex I (6)",
"Description": "Put in place systems and processes to identify, track, manage and monitor the status of patches to servers, operating system and application software running at the systems used by the UCB officials (end-users). Implement and update antivirus protection for all servers and applicable end points preferably through a centralised system.",
"Attributes": [
{
"ItemId": "annex_i_6",
"Service": "aws"
}
],
"Checks": [
"guardduty_no_high_severity_findings",
"rds_instance_minor_version_upgrade_enabled",
"redshift_cluster_automatic_upgrades",
"ssm_managed_compliant_patching",
"ssm_managed_compliant_patching"
]
},
{
"Id": "annex_i_7_1",
"Name": "Annex I (7.1)",
"Description": "Disallow administrative rights on end-user workstations/PCs/laptops and provide access rights on a need to know and need to do basis.",
"Attributes": [
{
"ItemId": "annex_i_7_1",
"Service": "iam"
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
{
"Id": "annex_i_7_2",
"Name": "Annex I (7.2)",
"Description": "Passwords should be set as complex and lengthy and users should not use same passwords for all the applications/systems/devices.",
"Attributes": [
{
"ItemId": "annex_i_7_2",
"Service": "iam"
}
],
"Checks": [
"iam_password_policy_reuse_24"
]
},
{
"Id": "annex_i_7_3",
"Name": "Annex I (7.3)",
"Description": "Remote Desktop Protocol (RDP) which allows others to access the computer remotely over a network or over the internet should be always disabled and should be enabled only with the approval of the authorised officer of the UCB. Logs for such remote access shall be enabled and monitored for suspicious activities.",
"Attributes": [
{
"ItemId": "annex_i_7_3",
"Service": "vpc"
}
],
"Checks": [
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22"
]
},
{
"Id": "annex_i_7_4",
"Name": "Annex I (7.4)",
"Description": "Implement appropriate (e.g. centralised) systems and controls to allow, manage, log and monitor privileged/super user/administrative access to critical systems (servers/databases, applications, network devices etc.)",
"Attributes": [
{
"ItemId": "annex_i_7_4",
"Service": "aws"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"cloudwatch_log_group_retention_policy_specific_days_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"opensearch_service_domains_cloudwatch_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_audit_logging",
"s3_bucket_server_access_logging_enabled",
"securityhub_enabled",
"vpc_flow_logs_enabled"
]
},
{
"Id": "annex_i_12",
"Name": "Annex I (12)",
"Description": "Take periodic back up of the important data and store this data off line (i.e., transferring important files to a storage device that can be detached from a computer/system after copying all the files).",
"Attributes": [
{
"ItemId": "annex_i_12",
"Service": "aws"
}
],
"Checks": [
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"rds_instance_backup_enabled",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
}
]
}

View File

@@ -0,0 +1,914 @@
{
"Framework": "SOC2",
"Version": "",
"Provider": "AWS",
"Description": "System and Organization Controls (SOC), defined by the American Institute of Certified Public Accountants (AICPA), is the name of a set of reports that's produced during an audit. It's intended for use by service organizations (organizations that provide information systems as a service to other organizations) to issue validated reports of internal controls over those information systems to the users of those services. The reports focus on controls grouped into five categories known as Trust Service Principles.",
"Requirements": [
{
"Id": "cc_1_1",
"Name": "CC1.1 COSO Principle 1: The entity demonstrates a commitment to integrity and ethical values",
"Description": "Sets the Tone at the Top - The board of directors and management, at all levels, demonstrate through their directives, actions, and behavior the importance of integrity and ethical values to support the functioning of the system of internal control.Establishes Standards of Conduct - The expectations of the board of directors and senior management concerning integrity and ethical values are defined in the entitys standards of conduct and understood at all levels of the entity and by outsourced service providers and business partners. Evaluates Adherence to Standards of Conduct - Processes are in place to evaluate the performance of individuals and teams against the entitys expected standards of conduct. Addresses Deviations in a Timely Manner - Deviations from the entitys expected standards of conduct are identified and remedied in a timely and consistent manner.",
"Attributes": [
{
"ItemId": "cc_1_1",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_1_2",
"Name": "CC1.2 COSO Principle 2: The board of directors demonstrates independence from management and exercises oversight of the development and performance of internal control",
"Description": "Establishes Oversight Responsibilities - The board of directors identifies and accepts its oversight responsibilities in relation to established requirements and expectations. Applies Relevant Expertise - The board of directors defines, maintains, and periodically evaluates the skills and expertise needed among its members to enable them to ask probing questions of senior management and take commensurate action. Operates Independently - The board of directors has sufficient members who are independent from management and objective in evaluations and decision making. Additional point of focus specifically related to all engagements using the trust services criteria: Supplements Board Expertise - The board of directors supplements its expertise relevant to security, availability, processing integrity, confidentiality, and privacy, as needed, through the use of a subcommittee or consultants.",
"Attributes": [
{
"ItemId": "cc_1_2",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_1_3",
"Name": "CC1.3 COSO Principle 3: Management establishes, with board oversight, structures, reporting lines, and appropriate authorities and responsibilities in the pursuit of objectives",
"Description": "Considers All Structures of the Entity - Management and the board of directors consider the multiple structures used (including operating units, legal entities, geographic distribution, and outsourced service providers) to support the achievement of objectives. Establishes Reporting Lines - Management designs and evaluates lines of reporting for each entity structure to enable execution of authorities and responsibilities and flow of information to manage the activities of the entity. Defines, Assigns, and Limits Authorities and Responsibilities - Management and the board of directors delegate authority, define responsibilities, and use appropriate processes and technology to assign responsibility and segregate duties as necessary at the various levels of the organization. Additional points of focus specifically related to all engagements using the trust services criteria: Addresses Specific Requirements When Defining Authorities and Responsibilities—Management and the board of directors consider requirements relevant to security, availability, processing integrity, confidentiality, and privacy when defining authorities and responsibilities. Considers Interactions With External Parties When Establishing Structures, Reporting Lines, Authorities, and Responsibilities — Management and the board of directors consider the need for the entity to interact with and monitor the activities of external parties when establishing structures, reporting lines, authorities, and responsibilities.",
"Attributes": [
{
"ItemId": "cc_1_3",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_disable_90_days_credentials"
]
},
{
"Id": "cc_1_4",
"Name": "CC1.4 COSO Principle 4: The entity demonstrates a commitment to attract, develop, and retain competent individuals in alignment with objectives",
"Description": "Establishes Policies and Practices - Policies and practices reflect expectations of competence necessary to support the achievement of objectives. Evaluates Competence and Addresses Shortcomings - The board of directors and management evaluate competence across the entity and in outsourced service providers in relation to established policies and practices and act as necessary to address shortcomings.Attracts, Develops, and Retains Individuals - The entity provides the mentoring and training needed to attract, develop, and retain sufficient and competent personnel and outsourced service providers to support the achievement of objectives.Plans and Prepares for Succession - Senior management and the board of directors develop contingency plans for assignments of responsibility important for internal control.Additional point of focus specifically related to all engagements using the trust services criteria:Considers the Background of Individuals - The entity considers the background of potential and existing personnel, contractors, and vendor employees when determining whether to employ and retain the individuals.Considers the Technical Competency of Individuals - The entity considers the technical competency of potential and existing personnel, contractors, and vendor employees when determining whether to employ and retain the individuals.Provides Training to Maintain Technical Competencies - The entity provides training programs, including continuing education and training, to ensure skill sets and technical competency of existing personnel, contractors, and vendor employees are developed and maintained.",
"Attributes": [
{
"ItemId": "cc_1_4",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_1_5",
"Name": "CC1.5 COSO Principle 5: The entity holds individuals accountable for their internal control responsibilities in the pursuit of objectives",
"Description": "Enforces Accountability Through Structures, Authorities, and Responsibilities - Management and the board of directors establish the mechanisms to communicate and hold individuals accountable for performance of internal control responsibilities across the entity and implement corrective action as necessary. Establishes Performance Measures, Incentives, and Rewards - Management and the board of directors establish performance measures, incentives, and other rewards appropriate for responsibilities at all levels of the entity, reflecting appropriate dimensions of performance and expected standards of conduct, and considering the achievement of both short-term and longer-term objectives.Evaluates Performance Measures, Incentives, and Rewards for Ongoing Relevance - Management and the board of directors align incentives and rewards with the fulfillment of internal control responsibilities in the achievement of objectives.Considers Excessive Pressures - Management and the board of directors evaluate and adjust pressures associated with the achievement of objectives as they assign responsibilities, develop performance measures, and evaluate performance. Evaluates Performance and Rewards or Disciplines Individuals - Management and the board of directors evaluate performance of internal control responsibilities, including adherence to standards of conduct and expected levels of competence, and provide rewards or exercise disciplinary action, as appropriate.",
"Attributes": [
{
"ItemId": "cc_1_5",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_2_1",
"Name": "CC2.1 COSO Principle 13: The entity obtains or generates and uses relevant, quality information to support the functioning of internal control",
"Description": "Identifies Information Requirements - A process is in place to identify the information required and expected to support the functioning of the other components of internal control and the achievement of the entitys objectives. Captures Internal and External Sources of Data - Information systems capture internal and external sources of data. Processes Relevant Data Into Information - Information systems process and transform relevant data into information. Maintains Quality Throughout Processing - Information systems produce information that is timely, current, accurate, complete, accessible, protected, verifiable, and retained. Information is reviewed to assess its relevance in supporting the internal control components.",
"Attributes": [
{
"ItemId": "cc_2_1",
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_multi_region_enabled",
"config_recorder_all_regions_enabled"
]
},
{
"Id": "cc_2_2",
"Name": "CC2.2 COSO Principle 14: The entity internally communicates information, including objectives and responsibilities for internal control, necessary to support the functioning of internal control",
"Description": "Communicates Internal Control Information - A process is in place to communicate required information to enable all personnel to understand and carry out their internal control responsibilities. Communicates With the Board of Directors - Communication exists between management and the board of directors so that both have information needed to fulfill their roles with respect to the entitys objectives. Provides Separate Communication Lines - Separate communication channels, such as whistle-blower hotlines, are in place and serve as fail-safe mechanisms to enable anonymous or confidential communication when normal channels are inoperative or ineffective. Selects Relevant Method of Communication - The method of communication considers the timing, audience, and nature of the information. Additional point of focus specifically related to all engagements using the trust services criteria: Communicates Responsibilities - Entity personnel with responsibility for designing, developing, implementing,operating, maintaining, or monitoring system controls receive communications about their responsibilities, including changes in their responsibilities, and have the information necessary to carry out those responsibilities. Communicates Information on Reporting Failures, Incidents, Concerns, and Other Matters—Entity personnel are provided with information on how to report systems failures, incidents, concerns, and other complaints to personnel. Communicates Objectives and Changes to Objectives - The entity communicates its objectives and changes to those objectives to personnel in a timely manner. Communicates Information to Improve Security Knowledge and Awareness - The entity communicates information to improve security knowledge and awareness and to model appropriate security behaviors to personnel through a security awareness training program. 
Additional points of focus that apply only when an engagement using the trust services criteria is performed at the system level: Communicates Information About System Operation and Boundaries - The entity prepares and communicates information about the design and operation of the system and its boundaries to authorized personnel to enable them to understand their role in the system and the results of system operation. Communicates System Objectives - The entity communicates its objectives to personnel to enable them to carry out their responsibilities. Communicates System Changes - System changes that affect responsibilities or the achievement of the entity's objectives are communicated in a timely manner.",
"Attributes": [
{
"ItemId": "cc_2_2",
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_2_3",
"Name": "CC2.3 COSO Principle 15: The entity communicates with external parties regarding matters affecting the functioning of internal control",
"Description": "Communicates to External Parties - Processes are in place to communicate relevant and timely information to external parties, including shareholders, partners, owners, regulators, customers, financial analysts, and other external parties. Enables Inbound Communications - Open communication channels allow input from customers, consumers, suppliers, external auditors, regulators, financial analysts, and others, providing management and the board of directors with relevant information. Communicates With the Board of Directors - Relevant information resulting from assessments conducted by external parties is communicated to the board of directors. Provides Separate Communication Lines - Separate communication channels, such as whistle-blower hotlines, are in place and serve as fail-safe mechanisms to enable anonymous or confidential communication when normal channels are inoperative or ineffective. Selects Relevant Method of Communication - The method of communication considers the timing, audience, and nature of the communication and legal, regulatory, and fiduciary requirements and expectations. Communicates Objectives Related to Confidentiality and Changes to Objectives - The entity communicates, to external users, vendors, business partners and others whose products and services are part of the system, objectives and changes to objectives related to confidentiality. Additional point of focus that applies only to an engagement using the trust services criteria for privacy: Communicates Objectives Related to Privacy and Changes to Objectives - The entity communicates, to external users, vendors, business partners and others whose products and services are part of the system, objectives related to privacy and changes to those objectives. 
Additional points of focus that apply only when an engagement using the trust services criteria is performed at the system level: Communicates Information About System Operation and Boundaries - The entity prepares and communicates information about the design and operation of the system and its boundaries to authorized external users to permit users to understand their role in the system and the results of system operation. Communicates System Objectives - The entity communicates its system objectives to appropriate external users. Communicates System Responsibilities - External users with responsibility for designing, developing, implementing, operating, maintaining, and monitoring system controls receive communications about their responsibilities and have the information necessary to carry out those responsibilities. Communicates Information on Reporting System Failures, Incidents, Concerns, and Other Matters - External users are provided with information on how to report systems failures, incidents, concerns, and other complaints to appropriate personnel.",
"Attributes": [
{
"ItemId": "cc_2_3",
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_3_1",
"Name": "CC3.1 COSO Principle 6: The entity specifies objectives with sufficient clarity to enable the identification and assessment of risks relating to objectives",
"Description": "Operations Ojectives:Reflects Management's Choices - Operations objectives reflect management's choices about structure, industry considerations, and performance of the entity.Considers Tolerances for Risk - Management considers the acceptable levels of variation relative to the achievement of operations objectives.External Financial Reporting Objectives:Complies With Applicable Accounting Standards - Financial reporting objectives are consistent with accounting principles suitable and available for that entity. The accounting principles selected are appropriate in the circumstances.External Nonfinancial Reporting Objectives:Complies With Externally Established Frameworks - Management establishes objectives consistent with laws and regulations or standards and frameworks of recognized external organizations.Reflects Entity Activities - External reporting reflects the underlying transactions and events within a range of acceptable limits.Considers the Required Level of Precision—Management reflects the required level of precision and accuracy suitable for user needs and based on criteria established by third parties in nonfinancial reporting.Internal Reporting Objectives:Reflects Management's Choices - Internal reporting provides management with accurate and complete information regarding management's choices and information needed in managing the entity.Considers the Required Level of Precision—Management reflects the required level of precision and accuracy suitable for user needs in nonfinancial reporting objectives and materiality within financial reporting objectives.Reflects Entity Activities—Internal reporting reflects the underlying transactions and events within a range of acceptable limits.Compliance Objectives:Reflects External Laws and Regulations - Laws and regulations establish minimum standards of conduct, which the entity integrates into compliance objectives.Considers Tolerances for Risk - Management considers the acceptable levels 
of variation relative to the achievement of operations objectives.Additional point of focus specifically related to all engagements using the trust services criteria: Establishes Sub-objectives to Support Objectives—Management identifies sub-objectives related to security, availability, processing integrity, confidentiality, and privacy to support the achievement of the entitys objectives related to reporting, operations, and compliance.",
"Attributes": [
{
"ItemId": "cc_3_1",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled",
"config_recorder_all_regions_enabled"
]
},
{
"Id": "cc_3_2",
"Name": "CC3.2 COSO Principle 7: The entity identifies risks to the achievement of its objectives across the entity and analyzes risks as a basis for determining how the risks should be managed",
"Description": "Includes Entity, Subsidiary, Division, Operating Unit, and Functional Levels - The entity identifies and assesses risk at the entity, subsidiary, division, operating unit, and functional levels relevant to the achievement of objectives.Analyzes Internal and External Factors - Risk identification considers both internal and external factors and their impact on the achievement of objectives.Involves Appropriate Levels of Management - The entity puts into place effective risk assessment mechanisms that involve appropriate levels of management.Estimates Significance of Risks Identified - Identified risks are analyzed through a process that includes estimating the potential significance of the risk.Determines How to Respond to Risks - Risk assessment includes considering how the risk should be managed and whether to accept, avoid, reduce, or share the risk.Additional points of focus specifically related to all engagements using the trust services criteria:Identifies and Assesses Criticality of Information Assets and Identifies Threats and Vulnerabilities - The entity's risk identification and assessment process includes (1) identifying information assets, including physical devices and systems, virtual devices, software, data and data flows, external information systems, and organizational roles; (2) assessing the criticality of those information assets; (3) identifying the threats to the assets from intentional (including malicious) and unintentional acts and environmental events; and (4) identifying the vulnerabilities of the identified assets.",
"Attributes": [
{
"ItemId": "cc_3_2",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"ec2_instance_managed_by_ssm",
"ssm_managed_compliant_patching",
"guardduty_no_high_severity_findings",
"guardduty_is_enabled",
"ssm_managed_compliant_patching"
]
},
{
"Id": "cc_3_3",
"Name": "CC3.3 COSO Principle 8: The entity considers the potential for fraud in assessing risks to the achievement of objectives",
"Description": "Considers Various Types of Fraud - The assessment of fraud considers fraudulent reporting, possible loss of assets, and corruption resulting from the various ways that fraud and misconduct can occur.Assesses Incentives and Pressures - The assessment of fraud risks considers incentives and pressures.Assesses Opportunities - The assessment of fraud risk considers opportunities for unauthorized acquisition,use, or disposal of assets, altering the entitys reporting records, or committing other inappropriate acts.Assesses Attitudes and Rationalizations - The assessment of fraud risk considers how management and other personnel might engage in or justify inappropriate actions.Additional point of focus specifically related to all engagements using the trust services criteria: Considers the Risks Related to the Use of IT and Access to Information - The assessment of fraud risks includes consideration of threats and vulnerabilities that arise specifically from the use of IT and access to information.",
"Attributes": [
{
"ItemId": "cc_3_3",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_3_4",
"Name": "CC3.4 COSO Principle 9: The entity identifies and assesses changes that could significantly impact the system of internal control",
"Description": "Assesses Changes in the External Environment - The risk identification process considers changes to the regulatory, economic, and physical environment in which the entity operates.Assesses Changes in the Business Model - The entity considers the potential impacts of new business lines, dramatically altered compositions of existing business lines, acquired or divested business operations on the system of internal control, rapid growth, changing reliance on foreign geographies, and new technologies.Assesses Changes in Leadership - The entity considers changes in management and respective attitudes and philosophies on the system of internal control.Assess Changes in Systems and Technology - The risk identification process considers changes arising from changes in the entitys systems and changes in the technology environment.Assess Changes in Vendor and Business Partner Relationships - The risk identification process considers changes in vendor and business partner relationships.",
"Attributes": [
{
"ItemId": "cc_3_4",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "config",
"Soc_Type": "automated"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "cc_4_1",
"Name": "CC4.1 COSO Principle 16: The entity selects, develops, and performs ongoing and/or separate evaluations to ascertain whether the components of internal control are present and functioning",
"Description": "Considers a Mix of Ongoing and Separate Evaluations - Management includes a balance of ongoing and separate evaluations.Considers Rate of Change - Management considers the rate of change in business and business processes when selecting and developing ongoing and separate evaluations.Establishes Baseline Understanding - The design and current state of an internal control system are used to establish a baseline for ongoing and separate evaluations.Uses Knowledgeable Personnel - Evaluators performing ongoing and separate evaluations have sufficient knowledge to understand what is being evaluated.Integrates With Business Processes - Ongoing evaluations are built into the business processes and adjust to changing conditions.Adjusts Scope and Frequency—Management varies the scope and frequency of separate evaluations depending on risk.Objectively Evaluates - Separate evaluations are performed periodically to provide objective feedback.Considers Different Types of Ongoing and Separate Evaluations - Management uses a variety of different types of ongoing and separate evaluations, including penetration testing, independent certification made against established specifications (for example, ISO certifications), and internal audit assessments.",
"Attributes": [
{
"ItemId": "cc_4_1",
"Section": "CC4.0 - Monitoring Activities",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_4_2",
"Name": "CC4.2 COSO Principle 17: The entity evaluates and communicates internal control deficiencies in a timely manner to those parties responsible for taking corrective action, including senior management and the board of directors, as appropriate",
"Description": "Assesses Results - Management and the board of directors, as appropriate, assess results of ongoing and separate evaluations.Communicates Deficiencies - Deficiencies are communicated to parties responsible for taking corrective action and to senior management and the board of directors, as appropriate.Monitors Corrective Action - Management tracks whether deficiencies are remedied on a timely basis.",
"Attributes": [
{
"ItemId": "cc_4_2",
"Section": "CC4.0 - Monitoring Activities",
"Service": "guardduty",
"Soc_Type": "automated"
}
],
"Checks": [
"guardduty_is_enabled",
"guardduty_no_high_severity_findings"
]
},
{
"Id": "cc_5_1",
"Name": "CC5.1 COSO Principle 10: The entity selects and develops control activities that contribute to the mitigation of risks to the achievement of objectives to acceptable levels",
"Description": "Integrates With Risk Assessment - Control activities help ensure that risk responses that address and mitigate risks are carried out.Considers Entity-Specific Factors - Management considers how the environment, complexity, nature, and scope of its operations, as well as the specific characteristics of its organization, affect the selection and development of control activities.Determines Relevant Business Processes - Management determines which relevant business processes require control activities.Evaluates a Mix of 2017 Data Submitted Types - Control activities include a range and variety of controls and may include a balance of approaches to mitigate risks, considering both manual and automated controls, and preventive and detective controls.Considers at What Level Activities Are Applied - Management considers control activities at various levels in the entity.Addresses Segregation of Duties - Management segregates incompatible duties, and where such segregation is not practical, management selects and develops alternative control activities.",
"Attributes": [
{
"ItemId": "cc_5_1",
"Section": "CC5.0 - Control Activities",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_5_2",
"Name": "CC5.2 COSO Principle 11: The entity also selects and develops general control activities over technology to support the achievement of objectives",
"Description": "Determines Dependency Between the Use of Technology in Business Processes and Technology General Controls - Management understands and determines the dependency and linkage between business processes, automated control activities, and technology general controls.Establishes Relevant Technology Infrastructure Control Activities - Management selects and develops control activities over the technology infrastructure, which are designed and implemented to help ensure the completeness, accuracy, and availability of technology processing.Establishes Relevant Security Management Process Controls Activities - Management selects and develops control activities that are designed and implemented to restrict technology access rights to authorized users commensurate with their job responsibilities and to protect the entitys assets from external threats.Establishes Relevant Technology Acquisition, Development, and Maintenance Process Control Activities - Management selects and develops control activities over the acquisition, development, and maintenance of technology and its infrastructure to achieve managements objectives.",
"Attributes": [
{
"ItemId": "cc_5_2",
"Section": "CC5.0 - Control Activities",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_5_3",
"Name": "CCC5.3 COSO Principle 12: The entity deploys control activities through policies that establish what is expected and in procedures that put policies into action",
"Description": "Establishes Policies and Procedures to Support Deployment of Management s Directives - Management establishes control activities that are built into business processes and employees day-to-day activities through policies establishing what is expected and relevant procedures specifying actions.Establishes Responsibility and Accountability for Executing Policies and Procedures - Management establishes responsibility and accountability for control activities with management (or other designated personnel) of the business unit or function in which the relevant risks reside.Performs in a Timely Manner - Responsible personnel perform control activities in a timely manner as defined by the policies and procedures.Takes Corrective Action - Responsible personnel investigate and act on matters identified as a result of executing control activities.Performs Using Competent Personnel - Competent personnel with sufficient authority perform control activities with diligence and continuing focus.Reassesses Policies and Procedures - Management periodically reviews control activities to determine their continued relevance and refreshes them when necessary.",
"Attributes": [
{
"ItemId": "cc_5_3",
"Section": "CC5.0 - Control Activities",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_6_1",
"Name": "CC6.1 The entity implements logical access security software, infrastructure, and architectures over protected information assets to protect them from security events to meet the entity's objectives",
"Description": "Identifies and Manages the Inventory of Information Assets - The entity identifies, inventories, classifies, and manages information assets.Restricts Logical Access - Logical access to information assets, including hardware, data (at-rest, during processing, or in transmission), software, administrative authorities, mobile devices, output, and offline system components is restricted through the use of access control software and rule sets.Identifies and Authenticates Users - Persons, infrastructure and software are identified and authenticated prior to accessing information assets, whether locally or remotely.Considers Network Segmentation - Network segmentation permits unrelated portions of the entity's information system to be isolated from each other.Manages Points of Access - Points of access by outside entities and the types of data that flow through the points of access are identified, inventoried, and managed. The types of individuals and systems using each point of access are identified, documented, and managed.Restricts Access to Information Assets - Combinations of data classification, separate data structures, port restrictions, access protocol restrictions, user identification, and digital certificates are used to establish access control rules for information assets.Manages Identification and Authentication - Identification and authentication requirements are established, documented, and managed for individuals and systems accessing entity information, infrastructure and software.Manages Credentials for Infrastructure and Software - New internal and external infrastructure and software are registered, authorized, and documented prior to being granted access credentials and implemented on the network or access point. 
Credentials are removed and access is disabled when access is no longer required or the infrastructure and software are no longer in use.Uses Encryption to Protect Data - The entity uses encryption to supplement other measures used to protect data-at-rest, when such protections are deemed appropriate based on assessed risk.Protects Encryption Keys - Processes are in place to protect encryption keys during generation, storage, use, and destruction.",
"Attributes": [
{
"ItemId": "cc_6_1",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "s3",
"Soc_Type": "automated"
}
],
"Checks": [
"s3_bucket_public_access"
]
},
{
"Id": "cc_6_2",
"Name": "CC6.2 Prior to issuing system credentials and granting system access, the entity registers and authorizes new internal and external users whose access is administered by the entity",
"Description": "Prior to issuing system credentials and granting system access, the entity registers and authorizes new internal and external users whose access is administered by the entity. For those users whose access is administered by the entity, user system credentials are removed when user access is no longer authorized.Controls Access Credentials to Protected Assets - Information asset access credentials are created based on an authorization from the system's asset owner or authorized custodian.Removes Access to Protected Assets When Appropriate - Processes are in place to remove credential access when an individual no longer requires such access.Reviews Appropriateness of Access Credentials - The appropriateness of access credentials is reviewed on a periodic basis for unnecessary and inappropriate individuals with credentials.",
"Attributes": [
{
"ItemId": "cc_6_2",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "rds",
"Soc_Type": "automated"
}
],
"Checks": [
"rds_instance_no_public_access"
]
},
{
"Id": "cc_6_3",
"Name": "CC6.3 The entity authorizes, modifies, or removes access to data, software, functions, and other protected information assets based on roles, responsibilities, or the system design and changes, giving consideration to the concepts of least privilege and segregation of duties, to meet the entity's objectives",
"Description": "Creates or Modifies Access to Protected Information Assets - Processes are in place to create or modify access to protected information assets based on authorization from the assets owner.Removes Access to Protected Information Assets - Processes are in place to remove access to protected information assets when an individual no longer requires access.Uses Role-Based Access Controls - Role-based access control is utilized to support segregation of incompatible functions.",
"Attributes": [
{
"ItemId": "cc_6_3",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "iam",
"Soc_Type": "automated"
}
],
"Checks": [
"iam_policy_no_administrative_privileges"
]
},
{
"Id": "cc_6_4",
"Name": "CC6.4 The entity restricts physical access to facilities and protected information assets to authorized personnel to meet the entity's objectives",
"Description": "Creates or Modifies Physical Access - Processes are in place to create or modify physical access to facilities such as data centers, office spaces, and work areas, based on authorization from the system's asset owner.Removes Physical Access - Processes are in place to remove access to physical resources when an individual no longer requires access.Reviews Physical Access - Processes are in place to periodically review physical access to ensure consistency with job responsibilities.",
"Attributes": [
{
"ItemId": "cc_6_4",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_6_5",
"Name": "CC6.5 The entity discontinues logical and physical protections over physical assets only after the ability to read or recover data and software from those assets has been diminished and is no longer required to meet the entity's objectives",
"Description": "Identifies Data and Software for Disposal - Procedures are in place to identify data and software stored on equipment to be disposed and to render such data and software unreadable.Removes Data and Software From Entity Control - Procedures are in place to remove data and software stored on equipment to be removed from the physical control of the entity and to render such data and software unreadable.",
"Attributes": [
{
"ItemId": "cc_6_5",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_6_6",
"Name": "CC6.6 The entity implements logical access security measures to protect against threats from sources outside its system boundaries",
"Description": "Restricts Access — The types of activities that can occur through a communication channel (for example, FTP site, router port) are restricted.Protects Identification and Authentication Credentials — Identification and authentication credentials are protected during transmission outside its system boundaries.Requires Additional Authentication or Credentials — Additional authentication information or credentials are required when accessing the system from outside its boundaries.Implements Boundary Protection Systems — Boundary protection systems (for example, firewalls, demilitarized zones, and intrusion detection systems) are implemented to protect external access points from attempts and unauthorized access and are monitored to detect such attempts.",
"Attributes": [
{
"ItemId": "cc_6_6",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "ec2",
"Soc_Type": "automated"
}
],
"Checks": [
"ec2_instance_public_ip"
]
},
{
"Id": "cc_6_7",
"Name": "CC6.7 The entity restricts the transmission, movement, and removal of information to authorized internal and external users and processes, and protects it during transmission, movement, or removal to meet the entity's objectives",
"Description": "Restricts the Ability to Perform Transmission - Data loss prevention processes and technologies are used to restrict ability to authorize and execute transmission, movement and removal of information.Uses Encryption Technologies or Secure Communication Channels to Protect Data - Encryption technologies or secured communication channels are used to protect transmission of data and other communications beyond connectivity access points.Protects Removal Media - Encryption technologies and physical asset protections are used for removable media (such as USB drives and back-up tapes), as appropriate.Protects Mobile Devices - Processes are in place to protect mobile devices (such as laptops, smart phones and tablets) that serve as information assets.",
"Attributes": [
{
"ItemId": "cc_6_7",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "acm",
"Soc_Type": "automated"
}
],
"Checks": [
"acm_certificates_expiration_check"
]
},
{
"Id": "cc_6_8",
"Name": "CC6.8 The entity implements controls to prevent or detect and act upon the introduction of unauthorized or malicious software to meet the entity's objectives",
"Description": "Restricts Application and Software Installation - The ability to install applications and software is restricted to authorized individuals.Detects Unauthorized Changes to Software and Configuration Parameters - Processes are in place to detect changes to software and configuration parameters that may be indicative of unauthorized or malicious software.Uses a Defined Change Control Process - A management-defined change control process is used for the implementation of software.Uses Antivirus and Anti-Malware Software - Antivirus and anti-malware software is implemented and maintained to provide for the interception or detection and remediation of malware.Scans Information Assets from Outside the Entity for Malware and Other Unauthorized Software - Procedures are in place to scan information assets that have been transferred or returned to the entitys custody for malware and other unauthorized software and to remove any items detected prior to its implementation on the network.",
"Attributes": [
{
"ItemId": "cc_6_8",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled"
]
},
{
"Id": "cc_7_1",
"Name": "CC7.1 To meet its objectives, the entity uses detection and monitoring procedures to identify (1) changes to configurations that result in the introduction of new vulnerabilities, and (2) susceptibilities to newly discovered vulnerabilities",
"Description": "Uses Defined Configuration Standards - Management has defined configuration standards.Monitors Infrastructure and Software - The entity monitors infrastructure and software for noncompliance with the standards, which could threaten the achievement of the entity's objectives.Implements Change-Detection Mechanisms - The IT system includes a change-detection mechanism (for example, file integrity monitoring tools) to alert personnel to unauthorized modifications of critical system files, configuration files, or content files.Detects Unknown or Unauthorized Components - Procedures are in place to detect the introduction of unknown or unauthorized components.Conducts Vulnerability Scans - The entity conducts vulnerability scans designed to identify potential vulnerabilities or misconfigurations on a periodic basis and after any significant change in the environment and takes action to remediate identified deficiencies on a timely basis.",
"Attributes": [
{
"ItemId": "cc_7_1",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"guardduty_is_enabled",
"securityhub_enabled",
"ec2_instance_managed_by_ssm",
"ssm_managed_compliant_patching"
]
},
{
"Id": "cc_7_2",
"Name": "CC7.2 The entity monitors system components and the operation of those components for anomalies that are indicative of malicious acts, natural disasters, and errors affecting the entity's ability to meet its objectives; anomalies are analyzed to determine whether they represent security events",
"Description": "Implements Detection Policies, Procedures, and Tools - Detection policies and procedures are defined and implemented, and detection tools are implemented on Infrastructure and software to identify anomalies in the operation or unusual activity on systems. Procedures may include (1) a defined governance process for security event detection and management that includes provision of resources; (2) use of intelligence sources to identify newly discovered threats and vulnerabilities; and (3) logging of unusual system activities.Designs Detection Measures - Detection measures are designed to identify anomalies that could result from actual or attempted (1) compromise of physical barriers; (2) unauthorized actions of authorized personnel; (3) use of compromised identification and authentication credentials; (4) unauthorized access from outside the system boundaries; (5) compromise of authorized external parties; and (6) implementation or connection of unauthorized hardware and software.Implements Filters to Analyze Anomalies - Management has implemented procedures to filter, summarize, and analyze anomalies to identify security events.Monitors Detection Tools for Effective Operation - Management has implemented processes to monitor the effectiveness of detection tools.",
"Attributes": [
{
"ItemId": "cc_7_2",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"cloudtrail_cloudwatch_logging_enabled",
"cloudwatch_changes_to_network_acls_alarm_configured",
"cloudwatch_changes_to_network_gateways_alarm_configured",
"cloudwatch_changes_to_network_route_tables_alarm_configured",
"cloudwatch_changes_to_vpcs_alarm_configured",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"s3_bucket_server_access_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"cloudtrail_multi_region_enabled",
"securityhub_enabled",
"cloudwatch_log_group_retention_policy_specific_days_enabled",
"redshift_cluster_audit_logging",
"vpc_flow_logs_enabled",
"ec2_instance_imdsv2_enabled",
"guardduty_is_enabled",
"apigateway_logging_enabled",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22"
]
},
{
"Id": "cc_7_3",
"Name": "CC7.3 The entity evaluates security events to determine whether they could or have resulted in a failure of the entity to meet its objectives (security incidents) and, if so, takes actions to prevent or address such failures",
"Description": "Responds to Security Incidents - Procedures are in place for responding to security incidents and evaluating the effectiveness of those policies and procedures on a periodic basis.Communicates and Reviews Detected Security Events - Detected security events are communicated to and reviewed by the individuals responsible for the management of the security program and actions are taken, if necessary.Develops and Implements Procedures to Analyze Security Incidents - Procedures are in place to analyze security incidents and determine system impact.Assesses the Impact on Personal Information - Detected security events are evaluated to determine whether they could or did result in the unauthorized disclosure or use of personal information and whether there has been a failure to comply with applicable laws or regulations.Determines Personal Information Used or Disclosed - When an unauthorized use or disclosure of personal information has occurred, the affected information is identified.",
"Attributes": [
{
"ItemId": "cc_7_3",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"cloudwatch_log_group_kms_encryption_enabled",
"cloudtrail_log_file_validation_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"apigateway_logging_enabled",
"rds_instance_integration_cloudwatch_logs",
"securityhub_enabled",
"cloudwatch_changes_to_network_acls_alarm_configured",
"cloudwatch_changes_to_network_gateways_alarm_configured",
"cloudwatch_changes_to_network_route_tables_alarm_configured",
"cloudwatch_changes_to_vpcs_alarm_configured",
"elbv2_logging_enabled",
"elb_logging_enabled",
"s3_bucket_server_access_logging_enabled",
"cloudwatch_log_group_retention_policy_specific_days_enabled",
"vpc_flow_logs_enabled",
"guardduty_no_high_severity_findings"
]
},
{
"Id": "cc_7_4",
"Name": "CC7.4 The entity responds to identified security incidents by executing a defined incident response program to understand, contain, remediate, and communicate security incidents, as appropriate",
"Description": "Assigns Roles and Responsibilities - Roles and responsibilities for the design, implementation, maintenance, and execution of the incident response program are assigned, including the use of external resources when necessary.Contains Security Incidents - Procedures are in place to contain security incidents that actively threaten entity objectives.Mitigates Ongoing Security Incidents - Procedures are in place to mitigate the effects of ongoing security incidents.Ends Threats Posed by Security Incidents - Procedures are in place to end the threats posed by security incidents through closure of the vulnerability, removal of unauthorized access, and other remediation actions.Restores Operations - Procedures are in place to restore data and business operations to an interim state that permits the achievement of entity objectives. Develops and Implements Communication Protocols for Security Incidents - Protocols for communicating security incidents and actions taken to affected parties are developed and implemented to meet the entity's objectives.Obtains Understanding of Nature of Incident and Determines Containment Strategy - An understanding of the nature (for example, the method by which the incident occurred and the affected system resources) and severity of the security incident is obtained to determine the appropriate containment strategy, including (1) a determination of the appropriate response time frame, and (2) the determination and execution of the containment approach.Remediates Identified Vulnerabilities - Identified vulnerabilities are remediated through the development and execution of remediation activities.Communicates Remediation Activities - Remediation activities are documented and communicated in accordance with the incident response program.Evaluates the Effectiveness of Incident Response - The design of incident response activities is evaluated for effectiveness on a periodic basis.Periodically Evaluates Incidents - Periodically, 
management reviews incidents related to security, availability, processing integrity, confidentiality, and privacy and identifies the need for system changes based on incident patterns and root causes. Communicates Unauthorized Use and Disclosure - Events that resulted in unauthorized use or disclosure of personal information are communicated to the data subjects, legal and regulatory authorities, and others as required.Application of Sanctions - The conduct of individuals and organizations operating under the authority of the entity and involved in the unauthorized use or disclosure of personal information is evaluated and, if appropriate, sanctioned in accordance with entity policies and legal and regulatory requirements.",
"Attributes": [
{
"ItemId": "cc_7_4",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"cloudwatch_changes_to_network_acls_alarm_configured",
"cloudwatch_changes_to_network_gateways_alarm_configured",
"cloudwatch_changes_to_network_route_tables_alarm_configured",
"cloudwatch_changes_to_vpcs_alarm_configured",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"guardduty_is_enabled",
"guardduty_no_high_severity_findings",
"rds_instance_backup_enabled",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning",
"securityhub_enabled"
]
},
{
"Id": "cc_7_5",
"Name": "CC7.5 The entity identifies, develops, and implements activities to recover from identified security incidents",
"Description": "Restores the Affected Environment - The activities restore the affected environment to functional operation by rebuilding systems, updating software, installing patches, and changing configurations, as needed.Communicates Information About the Event - Communications about the nature of the incident, recovery actions taken, and activities required for the prevention of future security events are made to management and others as appropriate (internal and external).Determines Root Cause of the Event - The root cause of the event is determined.Implements Changes to Prevent and Detect Recurrences - Additional architecture or changes to preventive and detective controls, or both, are implemented to prevent and detect recurrences on a timely basis.Improves Response and Recovery Procedures - Lessons learned are analyzed, and the incident response plan and recovery procedures are improved.Implements Incident Recovery Plan Testing - Incident recovery plan testing is performed on a periodic basis. The testing includes (1) development of testing scenarios based on threat likelihood and magnitude; (2) consideration of relevant system components from across the entity that can impair availability; (3) scenarios that consider the potential for the lack of availability of key personnel; and (4) revision of continuity plans and systems based on test results.",
"Attributes": [
{
"ItemId": "cc_7_5",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_8_1",
"Name": "CC8.1 The entity authorizes, designs, develops or acquires, configures, documents, tests, approves, and implements changes to infrastructure, data, software, and procedures to meet its objectives",
"Description": "Manages Changes Throughout the System Lifecycle - A process for managing system changes throughout the lifecycle of the system and its components (infrastructure, data, software and procedures) is used to support system availability and processing integrity.Authorizes Changes - A process is in place to authorize system changes prior to development.Designs and Develops Changes - A process is in place to design and develop system changes.Documents Changes - A process is in place to document system changes to support ongoing maintenance of the system and to support system users in performing their responsibilities.Tracks System Changes - A process is in place to track system changes prior to implementation.Configures Software - A process is in place to select and implement the configuration parameters used to control the functionality of software.Tests System Changes - A process is in place to test system changes prior to implementation.Approves System Changes - A process is in place to approve system changes prior to implementation.Deploys System Changes - A process is in place to implement system changes.Identifies and Evaluates System Changes - Objectives affected by system changes are identified, and the ability of the modified system to meet the objectives is evaluated throughout the system development life cycle.Identifies Changes in Infrastructure, Data, Software, and Procedures Required to Remediate Incidents - Changes in infrastructure, data, software, and procedures required to remediate incidents to continue to meet objectives are identified, and the change process is initiated upon identification.Creates Baseline Configuration of IT Technology - A baseline configuration of IT and control systems is created and maintained.Provides for Changes Necessary in Emergency Situations - A process is in place for authorizing, designing, testing, approving and implementing changes necessary in emergency situations (that is, changes that need to be 
implemented in an urgent timeframe).Protects Confidential Information - The entity protects confidential information during system design, development, testing, implementation, and change processes to meet the entitys objectives related to confidentiality.Protects Personal Information - The entity protects personal information during system design, development, testing, implementation, and change processes to meet the entitys objectives related to privacy.",
"Attributes": [
{
"ItemId": "cc_8_1",
"Section": "CC8.0 - Change Management",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"config_recorder_all_regions_enabled"
]
},
{
"Id": "cc_9_1",
"Name": "CC9.1 The entity identifies, selects, and develops risk mitigation activities for risks arising from potential business disruptions",
"Description": "Considers Mitigation of Risks of Business Disruption - Risk mitigation activities include the development of planned policies, procedures, communications, and alternative processing solutions to respond to, mitigate, and recover from security events that disrupt business operations. Those policies and procedures include monitoring processes and information and communications to meet the entity's objectives during response, mitigation, and recovery efforts.Considers the Use of Insurance to Mitigate Financial Impact Risks - The risk management activities consider the use of insurance to offset the financial impact of loss events that would otherwise impair the ability of the entity to meet its objectives.",
"Attributes": [
{
"ItemId": "cc_9_1",
"Section": "CC9.0 - Risk Mitigation",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_9_2",
"Name": "CC9.2 The entity assesses and manages risks associated with vendors and business partners",
"Description": "Establishes Requirements for Vendor and Business Partner Engagements - The entity establishes specific requirements for a vendor and business partner engagement that includes (1) scope of services and product specifications, (2) roles and responsibilities, (3) compliance requirements, and (4) service levels.Assesses Vendor and Business Partner Risks - The entity assesses, on a periodic basis, the risks that vendors and business partners (and those entities vendors and business partners) represent to the achievement of the entity's objectives.Assigns Responsibility and Accountability for Managing Vendors and Business Partners - The entity assigns responsibility and accountability for the management of risks associated with vendors and business partners.Establishes Communication Protocols for Vendors and Business Partners - The entity establishes communication and resolution protocols for service or product issues related to vendors and business partners.Establishes Exception Handling Procedures From Vendors and Business Partners - The entity establishes exception handling procedures for service or product issues related to vendors and business partners.Assesses Vendor and Business Partner Performance - The entity periodically assesses the performance of vendors and business partners.Implements Procedures for Addressing Issues Identified During Vendor and Business Partner Assessments - The entity implements procedures for addressing issues identified with vendor and business partner relationships.Implements Procedures for Terminating Vendor and Business Partner Relationships - The entity implements procedures for terminating vendor and business partner relationships.Obtains Confidentiality Commitments from Vendors and Business Partners - The entity obtains confidentiality commitments that are consistent with the entitys confidentiality commitments and requirements from vendors and business partners who have access to confidential information.Assesses 
Compliance With Confidentiality Commitments of Vendors and Business Partners - On a periodic and as-needed basis, the entity assesses compliance by vendors and business partners with the entitys confidentiality commitments and requirements.Obtains Privacy Commitments from Vendors and Business Partners - The entity obtains privacy commitments, consistent with the entitys privacy commitments and requirements, from vendors and business partners who have access to personal information.Assesses Compliance with Privacy Commitments of Vendors and Business Partners - On a periodic and as-needed basis, the entity assesses compliance by vendors and business partners with the entitys privacy commitments and requirements and takes corrective action as necessary.",
"Attributes": [
{
"ItemId": "cc_9_2",
"Section": "CC9.0 - Risk Mitigation",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_a_1_1",
"Name": "A1.1 The entity maintains, monitors, and evaluates current processing capacity and use of system components (infrastructure, data, and software) to manage capacity demand and to enable the implementation of additional capacity to help meet its objectives",
"Description": "Measures Current Usage - The use of the system components is measured to establish a baseline for capacity management and to use when evaluating the risk of impaired availability due to capacity constraints.Forecasts Capacity - The expected average and peak use of system components is forecasted and compared to system capacity and associated tolerances. Forecasting considers capacity in the event of the failure of system components that constrain capacity.Makes Changes Based on Forecasts - The system change management process is initiated when forecasted usage exceeds capacity tolerances.",
"Attributes": [
{
"ItemId": "cc_a_1_1",
"Section": "CCA1.0 - Additional Criterial for Availability",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_a_1_2",
"Name": "A1.2 The entity authorizes, designs, develops or acquires, implements, operates, approves, maintains, and monitors environmental protections, software, data back-up processes, and recovery infrastructure to meet its objectives",
"Description": "Measures Current Usage - The use of the system components is measured to establish a baseline for capacity management and to use when evaluating the risk of impaired availability due to capacity constraints.Forecasts Capacity - The expected average and peak use of system components is forecasted and compared to system capacity and associated tolerances. Forecasting considers capacity in the event of the failure of system components that constrain capacity.Makes Changes Based on Forecasts - The system change management process is initiated when forecasted usage exceeds capacity tolerances.",
"Attributes": [
{
"ItemId": "cc_a_1_2",
"Section": "CCA1.0 - Additional Criterial for Availability",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"rds_instance_backup_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
},
{
"Id": "cc_a_1_3",
"Name": "A1.3 The entity tests recovery plan procedures supporting system recovery to meet its objectives",
"Description": "Implements Business Continuity Plan Testing - Business continuity plan testing is performed on a periodic basis. The testing includes (1) development of testing scenarios based on threat likelihood and magnitude; (2) consideration of system components from across the entity that can impair the availability; (3) scenarios that consider the potential for the lack of availability of key personnel; and (4) revision of continuity plans and systems based on test results.Tests Integrity and Completeness of Back-Up Data - The integrity and completeness of back-up information is tested on a periodic basis.",
"Attributes": [
{
"ItemId": "cc_a_1_3",
"Section": "CCA1.0 - Additional Criterial for Availability",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "cc_c_1_1",
"Name": "C1.1 The entity identifies and maintains confidential information to meet the entity's objectives related to confidentiality",
"Description": "Identifies Confidential information - Procedures are in place to identify and designate confidential information when it is received or created and to determine the period over which the confidential information is to be retained.Protects Confidential Information from Destruction - Procedures are in place to protect confidential information from erasure or destruction during the specified retention period of the information",
"Attributes": [
{
"ItemId": "cc_c_1_1",
"Section": "CCC1.0 - Additional Criterial for Confidentiality",
"Service": "aws",
"Soc_Type": "automated"
}
],
"Checks": [
"rds_instance_deletion_protection"
]
},
{
"Id": "cc_c_1_2",
"Name": "C1.2 The entity disposes of confidential information to meet the entity's objectives related to confidentiality",
"Description": "Identifies Confidential Information for Destruction - Procedures are in place to identify confidential information requiring destruction when the end of the retention period is reached.Destroys Confidential Information - Procedures are in place to erase or otherwise destroy confidential information that has been identified for destruction.",
"Attributes": [
{
"ItemId": "cc_c_1_2",
"Section": "CCC1.0 - Additional Criterial for Confidentiality",
"Service": "s3",
"Soc_Type": "automated"
}
],
"Checks": [
"s3_bucket_object_versioning"
]
},
{
"Id": "p_1_1",
"Name": "P1.1 The entity provides notice to data subjects about its privacy practices to meet the entity's objectives related to privacy",
"Description": "The entity provides notice to data subjects about its privacy practices to meet the entitys objectives related to privacy. The notice is updated and communicated to data subjects in a timely manner for changes to the entitys privacy practices, including changes in the use of personal information, to meet the entitys objectives related to privacy.Communicates to Data Subjects - Notice is provided to data subjects regarding the following:Purpose for collecting personal informationChoice and consentTypes of personal information collectedMethods of collection (for example, use of cookies or other tracking techniques)Use, retention, and disposalAccessDisclosure to third partiesSecurity for privacyQuality, including data subjects responsibilities for qualityMonitoring and enforcementIf personal information is collected from sources other than the individual, such sources are described in the privacy notice.Provides Notice to Data Subjects - Notice is provided to data subjects (1) at or before the time personal information is collected or as soon as practical thereafter, (2) at or before the entity changes its privacy notice or as soon as practical thereafter, or (3) before personal information is used for new purposes not previously identified.Covers Entities and Activities in Notice - An objective description of the entities and activities covered is included in the entitys privacy notice.Uses Clear and Conspicuous Language - The entitys privacy notice is conspicuous and uses clear language.",
"Attributes": [
{
"ItemId": "p_1_1",
"Section": "P1.0 - Privacy Criteria Related to Notice and Communication of Objectives Related to Privacy",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_2_1",
"Name": "P2.1 The entity communicates choices available regarding the collection, use, retention, disclosure, and disposal of personal information to the data subjects and the consequences, if any, of each choice",
"Description": "The entity communicates choices available regarding the collection, use, retention, disclosure, and disposal of personal information to the data subjects and the consequences, if any, of each choice. Explicit consent for the collection, use, retention, disclosure, and disposal of personal information is obtained from data subjects or other authorized persons, if required. Such consent is obtained only for the intended purpose of the information to meet the entitys objectives related to privacy. The entitys basis for determining implicit consent for the collection, use, retention, disclosure, and disposal of personal information is documented.Communicates to Data Subjects - Data subjects are informed (a) about the choices available to them with respect to the collection, use, and disclosure of personal information and (b) that implicit or explicit consent is required to collect, use, and disclose personal information, unless a law or regulation specifically requires or allows otherwise.Communicates Consequences of Denying or Withdrawing Consent - When personal information is collected, data subjects are informed of the consequences of refusing to provide personal information or denying or withdrawing consent to use personal information for purposes identified in the notice.Obtains Implicit or Explicit Consent - Implicit or explicit consent is obtained from data subjects at or before the time personal information is collected or soon thereafter. 
The individuals preferences expressed in his or her consent are confirmed and implemented.Documents and Obtains Consent for New Purposes and Uses - If information that was previously collected is to be used for purposes not previously identified in the privacy notice, the new purpose is documented, the data subject is notified, and implicit or explicit consent is obtained prior to such new use or purpose.Obtains Explicit Consent for Sensitive Information - Explicit consent is obtained directly from the data subject when sensitive personal information is collected, used, or disclosed, unless a law or regulation specifically requires otherwise.",
"Attributes": [
{
"ItemId": "p_2_1",
"Section": "P2.0 - Privacy Criteria Related to Choice and Consent",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_3_1",
"Name": "P3.1 Personal information is collected consistent with the entitys objectives related to privacy",
"Description": "Limits the Collection of Personal Information - The collection of personal information is limited to that necessary to meet the entitys objectives.Collects Information by Fair and Lawful Means - Methods of collecting personal information are reviewed by management before they are implemented to confirm that personal information is obtained (a) fairly, without intimidation or deception, and (b) lawfully, adhering to all relevant rules of law, whether derived from statute or common law, relating to the collection of personal information.Collects Information From Reliable Sources - Management confirms that third parties from whom personal information is collected (that is, sources other than the individual) are reliable sources that collect information fairly and lawfully.Informs Data Subjects When Additional Information Is Acquired - Data subjects are informed if the entity develops or acquires additional information about them for its use.",
"Attributes": [
{
"ItemId": "p_3_1",
"Section": "P3.0 - Privacy Criteria Related to Collection",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_3_2",
"Name": "P3.2 For information requiring explicit consent, the entity communicates the need for such consent, as well as the consequences of a failure to provide consent for the request for personal information, and obtains the consent prior to the collection of the information to meet the entitys objectives related to privacy",
"Description": "Obtains Explicit Consent for Sensitive Information - Explicit consent is obtained directly from the data subject when sensitive personal information is collected, used, or disclosed, unless a law or regulation specifically requires otherwise.Documents Explicit Consent to Retain Information - Documentation of explicit consent for the collection, use, or disclosure of sensitive personal information is retained in accordance with objectives related to privacy.",
"Attributes": [
{
"ItemId": "p_3_2",
"Section": "P3.0 - Privacy Criteria Related to Collection",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_4_1",
"Name": "P4.1 The entity limits the use of personal information to the purposes identified in the entitys objectives related to privacy",
"Description": "Uses Personal Information for Intended Purposes - Personal information is used only for the intended purposes for which it was collected and only when implicit or explicit consent has been obtained unless a law or regulation specifically requires otherwise.",
"Attributes": [
{
"ItemId": "p_4_1",
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_4_2",
"Name": "P4.2 The entity retains personal information consistent with the entitys objectives related to privacy",
"Description": "Retains Personal Information - Personal information is retained for no longer than necessary to fulfill the stated purposes, unless a law or regulation specifically requires otherwise.Protects Personal Information - Policies and procedures have been implemented to protect personal information from erasure or destruction during the specified retention period of the information.",
"Attributes": [
{
"ItemId": "p_4_2",
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_4_3",
"Name": "P4.3 The entity securely disposes of personal information to meet the entitys objectives related to privacy",
"Description": "Captures, Identifies, and Flags Requests for Deletion - Requests for deletion of personal information are captured, and information related to the requests is identified and flagged for destruction to meet the entitys objectives related to privacy.Disposes of, Destroys, and Redacts Personal Information - Personal information no longer retained is anonymized, disposed of, or destroyed in a manner that prevents loss, theft, misuse, or unauthorized access.Destroys Personal Information - Policies and procedures are implemented to erase or otherwise destroy personal information that has been identified for destruction.",
"Attributes": [
{
"ItemId": "p_4_3",
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_5_1",
"Name": "P5.1 The entity grants identified and authenticated data subjects the ability to access their stored personal information for review and, upon request, provides physical or electronic copies of that information to data subjects to meet the entitys objectives related to privacy",
"Description": "The entity grants identified and authenticated data subjects the ability to access their stored personal information for review and, upon request, provides physical or electronic copies of that information to data subjects to meet the entitys objectives related to privacy. If access is denied, data subjects are informed of the denial and reason for such denial, as required, to meet the entitys objectives related to privacy.Authenticates Data Subjects Identity - The identity of data subjects who request access to their personal information is authenticated before they are given access to that information.Permits Data Subjects Access to Their Personal Information - Data subjects are able to determine whether the entity maintains personal information about them and, upon request, may obtain access to their personal information.Provides Understandable Personal Information Within Reasonable Time - Personal information is provided to data subjects in an understandable form, in a reasonable time frame, and at a reasonable cost, if any.Informs Data Subjects If Access Is Denied - When data subjects are denied access to their personal information, the entity informs them of the denial and the reason for the denial in a timely manner, unless prohibited by law or regulation.",
"Attributes": [
{
"ItemId": "p_5_1",
"Section": "P5.0 - Privacy Criteria Related to Access",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_5_2",
"Name": "P5.2 The entity corrects, amends, or appends personal information based on information provided by data subjects and communicates such information to third parties, as committed or required, to meet the entitys objectives related to privacy",
"Description": "The entity corrects, amends, or appends personal information based on information provided by data subjects and communicates such information to third parties, as committed or required, to meet the entitys objectives related to privacy. If a request for correction is denied, data subjects are informed of the denial and reason for such denial to meet the entitys objectives related to privacy.Communicates Denial of Access Requests - Data subjects are informed, in writing, of the reason a request for access to their personal information was denied, the source of the entitys legal right to deny such access, if applicable, and the individuals right, if any, to challenge such denial, as specifically permitted or required by law or regulation.Permits Data Subjects to Update or Correct Personal Information - Data subjects are able to update or correct personal information held by the entity. The entity provides such updated or corrected information to third parties that were previously provided with the data subjects personal information consistent with the entitys objective related to privacy.Communicates Denial of Correction Requests - Data subjects are informed, in writing, about the reason a request for correction of personal information was denied and how they may appeal.",
"Attributes": [
{
"ItemId": "p_5_2",
"Section": "P5.0 - Privacy Criteria Related to Access",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_6_1",
"Name": "P6.1 The entity discloses personal information to third parties with the explicit consent of data subjects, and such consent is obtained prior to disclosure to meet the entitys objectives related to privacy",
"Description": "Communicates Privacy Policies to Third Parties - Privacy policies or other specific instructions or requirements for handling personal information are communicated to third parties to whom personal information is disclosed.Discloses Personal Information Only When Appropriate - Personal information is disclosed to third parties only for the purposes for which it was collected or created and only when implicit or explicit consent has been obtained from the data subject, unless a law or regulation specifically requires otherwise.Discloses Personal Information Only to Appropriate Third Parties - Personal information is disclosed only to third parties who have agreements with the entity to protect personal information in a manner consistent with the relevant aspects of the entitys privacy notice or other specific instructions or requirements. The entity has procedures in place to evaluate that the third parties have effective controls to meet the terms of the agreement, instructions, or requirements.",
"Attributes": [
{
"ItemId": "p_6_1",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_6_2",
"Name": "P6.2 The entity creates and retains a complete, accurate, and timely record of authorized disclosures of personal information to meet the entitys objectives related to privacy",
"Description": "Creates and Retains Record of Authorized Disclosures - The entity creates and maintains a record of authorized disclosures of personal information that is complete, accurate, and timely.",
"Attributes": [
{
"ItemId": "p_6_2",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_6_3",
"Name": "P6.3 The entity creates and retains a complete, accurate, and timely record of detected or reported unauthorized disclosures (including breaches) of personal information to meet the entitys objectives related to privacy",
"Description": "Creates and Retains Record of Detected or Reported Unauthorized Disclosures - The entity creates and maintains a record of detected or reported unauthorized disclosures of personal information that is complete, accurate, and timely.",
"Attributes": [
{
"ItemId": "p_6_3",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_6_4",
"Name": "P6.4 The entity obtains privacy commitments from vendors and other third parties who have access to personal information to meet the entitys objectives related to privacy",
"Description": "The entity obtains privacy commitments from vendors and other third parties who have access to personal information to meet the entitys objectives related to privacy. The entity assesses those parties compliance on a periodic and as-needed basis and takes corrective action, if necessary.Discloses Personal Information Only to Appropriate Third Parties - Personal information is disclosed only to third parties who have agreements with the entity to protect personal information in a manner consistent with the relevant aspects of the entitys privacy notice or other specific instructions or requirements. The entity has procedures in place to evaluate that the third parties have effective controls to meet the terms of the agreement, instructions, or requirements.Remediates Misuse of Personal Information by a Third Party - The entity takes remedial action in response to misuse of personal information by a third party to whom the entity has transferred such information.",
"Attributes": [
{
"ItemId": "p_6_4",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_6_5",
"Name": "P6.5 The entity obtains commitments from vendors and other third parties with access to personal information to notify the entity in the event of actual or suspected unauthorized disclosures of personal information",
"Description": "The entity obtains commitments from vendors and other third parties with access to personal information to notify the entity in the event of actual or suspected unauthorized disclosures of personal information. Such notifications are reported to appropriate personnel and acted on in accordance with established incident response procedures to meet the entitys objectives related to privacy.Remediates Misuse of Personal Information by a Third Party - The entity takes remedial action in response to misuse of personal information by a third party to whom the entity has transferred such information.Reports Actual or Suspected Unauthorized Disclosures - A process exists for obtaining commitments from vendors and other third parties to report to the entity actual or suspected unauthorized disclosures of personal information.",
"Attributes": [
{
"ItemId": "p_6_5",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_6_6",
"Name": "P6.6 The entity provides notification of breaches and incidents to affected data subjects, regulators, and others to meet the entitys objectives related to privacy",
"Description": "Remediates Misuse of Personal Information by a Third Party - The entity takes remedial action in response to misuse of personal information by a third party to whom the entity has transferred such information. Reports Actual or Suspected Unauthorized Disclosures - A process exists for obtaining commitments from vendors and other third parties to report to the entity actual or suspected unauthorized disclosures of personal information.",
"Attributes": [
{
"ItemId": "p_6_6",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_6_7",
"Name": "P6.7 The entity provides data subjects with an accounting of the personal information held and disclosure of the data subjects personal information, upon the data subjects request, to meet the entitys objectives related to privacy",
"Description": "Identifies Types of Personal Information and Handling Process - The types of personal information and sensitive personal information and the related processes, systems, and third parties involved in the handling of such information are identified. Captures, Identifies, and Communicates Requests for Information - Requests for an accounting of personal information held and disclosures of the data subjects personal information are captured, and information related to the requests is identified and communicated to data subjects to meet the entitys objectives related to privacy.",
"Attributes": [
{
"ItemId": "p_6_7",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_7_1",
"Name": "P7.1 The entity collects and maintains accurate, up-to-date, complete, and relevant personal information to meet the entitys objectives related to privacy",
"Description": "Ensures Accuracy and Completeness of Personal Information - Personal information is accurate and complete for the purposes for which it is to be used. Ensures Relevance of Personal Information - Personal information is relevant to the purposes for which it is to be used.",
"Attributes": [
{
"ItemId": "p_7_1",
"Section": "P7.0 - Privacy Criteria Related to Quality",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
},
{
"Id": "p_8_1",
"Name": "P8.1 The entity implements a process for receiving, addressing, resolving, and communicating the resolution of inquiries, complaints, and disputes from data subjects and others and periodically monitors compliance to meet the entitys objectives related to privacy",
"Description": "The entity implements a process for receiving, addressing, resolving, and communicating the resolution of inquiries, complaints, and disputes from data subjects and others and periodically monitors compliance to meet the entitys objectives related to privacy. Corrections and other necessary actions related to identified deficiencies are made or taken in a timely manner.Communicates to Data Subjects—Data subjects are informed about how to contact the entity with inquiries, complaints, and disputes.Addresses Inquiries, Complaints, and Disputes - A process is in place to address inquiries, complaints, and disputes.Documents and Communicates Dispute Resolution and Recourse - Each complaint is addressed, and the resolution is documented and communicated to the individual.Documents and Reports Compliance Review Results - Compliance with objectives related to privacy are reviewed and documented, and the results of such reviews are reported to management. If problems are identified, remediation plans are developed and implemented.Documents and Reports Instances of Noncompliance - Instances of noncompliance with objectives related to privacy are documented and reported and, if needed, corrective and disciplinary measures are taken on a timely basis.Performs Ongoing Monitoring - Ongoing procedures are performed for monitoring the effectiveness of controls over personal information and for taking timely corrective actions when necessary.",
"Attributes": [
{
"ItemId": "p_8_1",
"Section": "P8.0 - Privacy Criteria Related to Monitoring and Enforcement",
"Service": "aws",
"Soc_Type": "manual"
}
],
"Checks": []
}
]
}

View File

@@ -9,7 +9,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.1.4"
prowler_version = "3.2.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
@@ -18,7 +18,28 @@ banner_color = "\033[1;92m"
# Compliance
compliance_specification_dir = "./compliance"
available_compliance_frameworks = [
"ens_rd2022_aws",
"cis_1.4_aws",
"cis_1.5_aws",
"aws_audit_manager_control_tower_guardrails_aws",
"aws_foundational_security_best_practices_aws",
"cisa_aws",
"fedramp_low_revision_4_aws",
"fedramp_moderate_revision_4_aws",
"ffiec_aws",
"gdpr_aws",
"gxp_eu_annex_11_aws",
"gxp_21_cfr_part_11_aws",
"hipaa_aws",
"nist_800_53_revision_4_aws",
"nist_800_53_revision_5_aws",
"nist_800_171_revision_2_aws",
"nist_csf_1.1_aws",
"pci_3.2.1_aws",
"rbi_cyber_security_framework_aws",
"soc2_aws",
]
# AWS services-regions matrix json
aws_services_json_file = "aws_regions_by_service.json"

View File

@@ -136,17 +136,28 @@ def list_categories(provider: str, bulk_checks_metadata: dict) -> set():
def print_categories(categories: set):
print(
f"There are {Fore.YELLOW}{len(categories)}{Style.RESET_ALL} available categories: \n"
)
categories_num = len(categories)
plural_string = f"There are {Fore.YELLOW}{categories_num}{Style.RESET_ALL} available categories: \n"
singular_string = f"There is {Fore.YELLOW}{categories_num}{Style.RESET_ALL} available category: \n"
message = plural_string if categories_num > 1 else singular_string
print(message)
for category in categories:
print(f"- {category}")
def print_services(service_list: set):
print(
f"There are {Fore.YELLOW}{len(service_list)}{Style.RESET_ALL} available services: \n"
services_num = len(service_list)
plural_string = (
f"There are {Fore.YELLOW}{services_num}{Style.RESET_ALL} available services: \n"
)
singular_string = (
f"There is {Fore.YELLOW}{services_num}{Style.RESET_ALL} available service: \n"
)
message = plural_string if services_num > 1 else singular_string
print(message)
for service in service_list:
print(f"- {service}")
@@ -154,9 +165,12 @@ def print_services(service_list: set):
def print_compliance_frameworks(
bulk_compliance_frameworks: dict,
):
print(
f"There are {Fore.YELLOW}{len(bulk_compliance_frameworks.keys())}{Style.RESET_ALL} available Compliance Frameworks: \n"
)
frameworks_num = len(bulk_compliance_frameworks.keys())
plural_string = f"There are {Fore.YELLOW}{frameworks_num}{Style.RESET_ALL} available Compliance Frameworks: \n"
singular_string = f"There is {Fore.YELLOW}{frameworks_num}{Style.RESET_ALL} available Compliance Framework: \n"
message = plural_string if frameworks_num > 1 else singular_string
print(message)
for framework in bulk_compliance_frameworks.keys():
print(f"\t- {Fore.YELLOW}{framework}{Style.RESET_ALL}")
@@ -165,17 +179,18 @@ def print_compliance_requirements(
bulk_compliance_frameworks: dict, compliance_frameworks: list
):
for compliance_framework in compliance_frameworks:
for compliance in bulk_compliance_frameworks.values():
# Workaround until we have more Compliance Frameworks
split_compliance = compliance_framework.split("_")
framework = split_compliance[0].upper()
version = split_compliance[1].upper()
provider = split_compliance[2].upper()
if framework in compliance.Framework and compliance.Version == version:
for key in bulk_compliance_frameworks.keys():
framework = bulk_compliance_frameworks[key].Framework
provider = bulk_compliance_frameworks[key].Provider
version = bulk_compliance_frameworks[key].Version
requirements = bulk_compliance_frameworks[key].Requirements
# We can list the compliance requirements for a given framework using the
# bulk_compliance_frameworks keys since they are the compliance specification file name
if compliance_framework == key:
print(
f"Listing {framework} {version} {provider} Compliance Requirements:\n"
)
for requirement in compliance.Requirements:
for requirement in requirements:
checks = ""
for check in requirement.Checks:
checks += f" {Fore.YELLOW}\t\t{check}\n{Style.RESET_ALL}"
@@ -200,9 +215,16 @@ def print_checks(
)
sys.exit(1)
print(
f"\nThere are {Fore.YELLOW}{len(check_list)}{Style.RESET_ALL} available checks.\n"
checks_num = len(check_list)
plural_string = (
f"\nThere are {Fore.YELLOW}{checks_num}{Style.RESET_ALL} available checks.\n"
)
singular_string = (
f"\nThere is {Fore.YELLOW}{checks_num}{Style.RESET_ALL} available check.\n"
)
message = plural_string if checks_num > 1 else singular_string
print(message)
# Parse checks from compliance frameworks specification
@@ -350,7 +372,6 @@ def execute_checks(
# If check does not exists in the provider or is from another provider
except ModuleNotFoundError:
logger.critical(
f"Check '{check_name}' was not found for the {provider.upper()} provider"
)
@@ -361,8 +382,13 @@ def execute_checks(
)
else:
# Default execution
checks_num = len(checks_to_execute)
plural_string = "checks"
singular_string = "check"
check_noun = plural_string if checks_num > 1 else singular_string
print(
f"{Style.BRIGHT}Executing {len(checks_to_execute)} checks, please wait...{Style.RESET_ALL}\n"
f"{Style.BRIGHT}Executing {checks_num} {check_noun}, please wait...{Style.RESET_ALL}\n"
)
with alive_bar(
total=len(checks_to_execute),
@@ -460,3 +486,86 @@ def update_audit_metadata(
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def recover_checks_from_service(service_list: list, provider: str) -> set:
    """Return the set of check names available for the given services.

    Args:
        service_list: iterable of service names (e.g. "s3", "ec2").
        provider: cloud provider whose checks are discovered (e.g. "aws").

    Returns:
        A set with the name of every check belonging to the services.
        Services that expose no checks are logged and skipped.
    """
    checks = set()
    for service in service_list:
        modules = recover_checks_from_provider(provider, service)
        if not modules:
            logger.error(f"Service '{service}' does not have checks.")
        else:
            for check_module in modules:
                # Recover the check name from the module import path
                # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
                check_name = check_module[0].split(".")[-1]
                checks.add(check_name)
    return checks
def get_checks_from_input_arn(audit_resources: list, provider: str) -> list:
    """Return the sorted list of check names applicable to the input ARNs.

    Only the services (and, where meaningful, sub-services) present in the
    audited resource ARNs are considered, so a resource-based scan executes
    just the checks that can apply to those resources.

    Args:
        audit_resources: resource ARNs passed by the user.
        provider: cloud provider whose checks are discovered (e.g. "aws").
    """
    checks_from_arn = set()
    # Handle if there are audit resources so only their services are executed
    if audit_resources:
        # Services whose ARN resource part does not name a usable sub-service
        services_without_subservices = ["guardduty", "kms", "s3", "elb"]
        service_list = set()
        sub_service_list = set()
        for resource in audit_resources:
            # ARN layout: arn:partition:service:region:account:resource
            service = resource.split(":")[2]
            sub_service = resource.split(":")[5].split("/")[0].replace("-", "_")
            if (
                service != "wafv2" and service != "waf"
            ):  # WAF Services does not have checks
                # Parse services when they are different in the ARNs
                if service == "lambda":
                    service = "awslambda"
                if service == "elasticloadbalancing":
                    service = "elb"
                elif service == "logs":
                    service = "cloudwatch"
                service_list.add(service)
                # Get subservices to execute only applicable checks
                if service not in services_without_subservices:
                    # Parse some specific subservices
                    if service == "ec2":
                        if sub_service == "security_group":
                            sub_service = "securitygroup"
                        if sub_service == "network_acl":
                            sub_service = "networkacl"
                        if sub_service == "image":
                            sub_service = "ami"
                    if service == "rds":
                        if sub_service == "cluster_snapshot":
                            sub_service = "snapshot"
                    sub_service_list.add(sub_service)
                else:
                    sub_service_list.add(service)
        checks = recover_checks_from_service(service_list, provider)
        # Filter only checks with audited subservices. NOTE: the exclusion must
        # test the sub-service that actually matched the check, not the leaked
        # loop variable from the resource loop above ("policy" would otherwise
        # substring-match password-policy checks spuriously).
        for check in checks:
            for sub_service in sub_service_list:
                if sub_service in check and not (
                    sub_service == "policy" and "password_policy" in check
                ):
                    checks_from_arn.add(check)
                    break
    # Return final checks list
    return sorted(checks_from_arn)
def get_regions_from_audit_resources(audit_resources: list) -> list:
    """Extract the distinct, non-empty regions from the audit resource ARNs.

    First-seen order is preserved. Returns None when no ARN carries a
    region (e.g. global resources such as S3 buckets or IAM entities).
    """
    seen_regions = []
    for arn in audit_resources:
        # ARN layout: arn:partition:service:region:account:resource
        candidate = arn.split(":")[3]
        # Skip region-less ARNs and regions already collected.
        if candidate and candidate not in seen_regions:
            seen_regions.append(candidate)
    return seen_regions if seen_regions else None

View File

@@ -2,9 +2,9 @@ from prowler.lib.check.check import (
parse_checks_from_compliance_framework,
parse_checks_from_file,
recover_checks_from_provider,
recover_checks_from_service,
)
from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
# Generate the list of checks to execute
@@ -19,25 +19,10 @@ def load_checks_to_execute(
compliance_frameworks: list,
categories: set,
provider: str,
audit_info: AWS_Audit_Info,
) -> set:
"""Generate the list of checks to execute based on the cloud provider and input arguments specified"""
checks_to_execute = set()
# Handle if there are audit resources so only their services are executed
if audit_info.audit_resources:
service_list = []
for resource in audit_info.audit_resources:
service = resource.split(":")[2]
# Parse services when they are different in the ARNs
if service == "lambda":
service = "awslambda"
if service == "elasticloadbalancing":
service = "elb"
elif service == "logs":
service = "cloudwatch"
service_list.append(service)
# Handle if there are checks passed using -c/--checks
if check_list:
for check_name in check_list:
@@ -59,19 +44,7 @@ def load_checks_to_execute(
# Handle if there are services passed using -s/--services
elif service_list:
# Loaded dynamically from modules within provider/services
for service in service_list:
modules = recover_checks_from_provider(provider, service)
if not modules:
logger.error(f"Service '{service}' does not have checks.")
else:
for check_module in modules:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check_module[0].split(".")[-1]
# If the service is present in the group list passed as parameters
# if service_name in group_list: checks_to_execute.add(check_name)
checks_to_execute.add(check_name)
checks_to_execute = recover_checks_from_service(service_list, provider)
# Handle if there are compliance frameworks passed using --compliance
elif compliance_frameworks:

View File

@@ -4,6 +4,7 @@ from prowler.lib.check.compliance_models import (
Compliance_Base_Model,
Compliance_Requirement,
)
from prowler.lib.check.models import Check_Report_AWS
from prowler.lib.logger import logger
@@ -17,6 +18,7 @@ def update_checks_metadata_with_compliance(
for framework in bulk_compliance_frameworks.values():
for requirement in framework.Requirements:
compliance_requirements = []
# Verify if check is in the requirement
if check in requirement.Checks:
# Create the Compliance_Requirement
requirement = Compliance_Requirement(
@@ -34,12 +36,71 @@ def update_checks_metadata_with_compliance(
Framework=framework.Framework,
Provider=framework.Provider,
Version=framework.Version,
Description=framework.Description,
Requirements=compliance_requirements,
)
# Include the compliance framework for the check
check_compliance.append(compliance)
# Save it into the check's metadata
bulk_checks_metadata[check].Compliance = check_compliance
# Add requirements of Manual Controls
for framework in bulk_compliance_frameworks.values():
for requirement in framework.Requirements:
compliance_requirements = []
# Verify if requirement is Manual
if not requirement.Checks:
compliance_requirements.append(requirement)
# Create the Compliance_Model
compliance = Compliance_Base_Model(
Framework=framework.Framework,
Provider=framework.Provider,
Version=framework.Version,
Description=framework.Description,
Requirements=compliance_requirements,
)
# Include the compliance framework for the check
check_compliance.append(compliance)
# Create metadata for Manual Control
manual_check_metadata = """{
"Provider" : "aws",
"CheckID" : "manual_check",
"CheckTitle" : "Manual Check",
"CheckType" : [],
"ServiceName" : "",
"SubServiceName" : "",
"ResourceIdTemplate" : "",
"Severity" : "",
"ResourceType" : "",
"Description" : "",
"Risk" : "",
"RelatedUrl" : "",
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "",
"Url": ""
}
},
"Categories" : [],
"Tags" : {},
"DependsOn" : [],
"RelatedTo" : [],
"Notes" : ""
}"""
manual_check = Check_Report_AWS(manual_check_metadata)
manual_check.status = "INFO"
manual_check.status_extended = "Manual check"
manual_check.resource_id = "manual_check"
manual_check.Compliance = check_compliance
# Save it into the check's metadata
bulk_checks_metadata["manual_check"] = manual_check
return bulk_checks_metadata
except Exception as e:
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")

View File

@@ -1,8 +1,8 @@
import sys
from enum import Enum
from typing import Any, List, Optional, Union
from typing import Optional, Union
from pydantic import BaseModel, ValidationError
from pydantic import BaseModel, ValidationError, root_validator
from prowler.lib.logger import logger
@@ -11,10 +11,10 @@ from prowler.lib.logger import logger
class ENS_Requirements_Nivel(str, Enum):
"""ENS V3 Requirements Level"""
opcional = "opcional"
bajo = "bajo"
medio = "medio"
alto = "alto"
pytec = "pytec"
class ENS_Requirements_Dimensiones(str, Enum):
@@ -27,35 +27,101 @@ class ENS_Requirements_Dimensiones(str, Enum):
disponibilidad = "disponibilidad"
class ENS_Requirements_Tipos(str, Enum):
"""ENS Requirements Tipos"""
refuerzo = "refuerzo"
requisito = "requisito"
recomendacion = "recomendacion"
medida = "medida"
class ENS_Requirements(BaseModel):
"""ENS V3 Framework Requirements"""
IdGrupoControl: str
Marco: str
Categoria: str
Descripcion_Control: str
Nivel: list[ENS_Requirements_Nivel]
DescripcionControl: str
Tipo: ENS_Requirements_Tipos
Nivel: ENS_Requirements_Nivel
Dimensiones: list[ENS_Requirements_Dimensiones]
# Generic Compliance Requirements
class Generic_Compliance_Requirements(BaseModel):
"""Generic Compliance Requirements"""
ItemId: str
Section: Optional[str]
SubSection: Optional[str]
SubGroup: Optional[str]
Service: str
Soc_Type: Optional[str]
class CIS_Requirements_Profile(str):
"""CIS Requirements Profile"""
Level_1 = "Level 1"
Level_2 = "Level 2"
class CIS_Requirements_AssessmentStatus(str):
"""CIS Requirements Assessment Status"""
Manual = "Manual"
Automated = "Automated"
# CIS Requirements
class CIS_Requirements(BaseModel):
"""CIS Requirements"""
Section: str
Profile: CIS_Requirements_Profile
AssessmentStatus: CIS_Requirements_AssessmentStatus
Description: str
RationaleStatement: str
ImpactStatement: str
RemediationProcedure: str
AuditProcedure: str
AdditionalInformation: str
References: str
# Base Compliance Model
class Compliance_Requirement(BaseModel):
"""Compliance_Requirement holds the base model for every requirement within a compliance framework"""
Id: str
Description: str
Attributes: list[Union[ENS_Requirements, Any]]
Checks: List[str]
Attributes: list[
Union[CIS_Requirements, ENS_Requirements, Generic_Compliance_Requirements]
]
Checks: list[str]
class Compliance_Base_Model(BaseModel):
"""Compliance_Base_Model holds the base model for every compliance framework"""
Framework: str
Provider: Optional[str]
Version: str
Provider: str
Version: Optional[str]
Description: str
Requirements: list[Compliance_Requirement]
@root_validator(pre=True)
# noqa: F841 - since vulture raises unused variable 'cls'
def framework_and_provider_must_not_be_empty(cls, values): # noqa: F841
framework, provider = (
values.get("Framework"),
values.get("Provider"),
)
if framework == "" or provider == "":
raise ValueError("Framework or Provider must not be empty")
return values
# Testing Pending
def load_compliance_framework(

View File

@@ -2,7 +2,11 @@ import argparse
import sys
from argparse import RawTextHelpFormatter
from prowler.config.config import default_output_directory, prowler_version
from prowler.config.config import (
available_compliance_frameworks,
default_output_directory,
prowler_version,
)
from prowler.providers.aws.aws_provider import get_aws_available_regions
from prowler.providers.aws.lib.arn.arn import is_valid_arn
@@ -24,7 +28,6 @@ class ProwlerArgumentParser:
epilog="""
To see the different available options on a specific provider, run:
prowler {provider} -h|--help
Detailed documentation at https://docs.prowler.cloud
""",
)
@@ -212,7 +215,7 @@ Detailed documentation at https://docs.prowler.cloud
"--compliance",
nargs="+",
help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: ens_rd2022_aws)",
choices=["ens_rd2022_aws", "cis_1.4_aws", "cis_1.5_aws"],
choices=available_compliance_frameworks,
)
group.add_argument(
"--categories",
@@ -241,7 +244,7 @@ Detailed documentation at https://docs.prowler.cloud
"--list-compliance-requirements",
nargs="+",
help="List compliance requirements for a given requirement",
choices=["ens_rd2022_aws", "cis_1.4_aws", "cis_1.5_aws"],
choices=available_compliance_frameworks,
)
list_group.add_argument(
"--list-categories",
@@ -313,6 +316,11 @@ Detailed documentation at https://docs.prowler.cloud
action="store_true",
help="Send check output to AWS Security Hub",
)
aws_security_hub_subparser.add_argument(
"--skip-sh-update",
action="store_true",
help="Skip updating previous findings of Prowler in Security Hub",
)
# AWS Quick Inventory
aws_quick_inventory_subparser = aws_parser.add_argument_group("Quick Inventory")
aws_quick_inventory_subparser.add_argument(
@@ -376,6 +384,16 @@ Detailed documentation at https://docs.prowler.cloud
help="Scan only resources with specific AWS Resource ARNs, e.g., arn:aws:iam::012345678910:user/test arn:aws:ec2:us-east-1:123456789012:vpc/vpc-12345678",
)
# Boto3 Config
boto3_config_subparser = aws_parser.add_argument_group("Boto3 Config")
boto3_config_subparser.add_argument(
"--aws-retries-max-attempts",
nargs="?",
default=None,
type=int,
help="Set the maximum attemps for the Boto3 standard retrier config (Default: 3)",
)
def __init_azure_parser__(self):
"""Init the Azure Provider CLI parser"""
azure_parser = self.subparsers.add_parser(

View File

@@ -4,102 +4,65 @@ from csv import DictWriter
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import timestamp
from prowler.config.config import orange_color, timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
Check_Output_CSV_CIS,
Check_Output_CSV_ENS_RD2022,
Check_Output_CSV_Generic_Compliance,
generate_csv_fields,
)
def add_manual_controls(output_options, audit_info, file_descriptors):
try:
# Check if MANUAL control was already added to output
if "manual_check" in output_options.bulk_checks_metadata:
manual_finding = output_options.bulk_checks_metadata["manual_check"]
fill_compliance(
output_options, manual_finding, audit_info, file_descriptors
)
del output_options.bulk_checks_metadata["manual_check"]
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def fill_compliance(output_options, finding, audit_info, file_descriptors):
# We have to retrieve all the check's compliance requirements
check_compliance = output_options.bulk_checks_metadata[
finding.check_metadata.CheckID
].Compliance
csv_header = compliance_row = compliance_output = None
for compliance in check_compliance:
if (
compliance.Framework == "ENS"
and compliance.Version == "RD2022"
and "ens_rd2022_aws" in output_options.output_modes
):
compliance_output = "ens_rd2022_aws"
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_ENS_RD2022(
Provider=finding.check_metadata.Provider,
AccountId=audit_info.audited_account,
Region=finding.region,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_IdGrupoControl=attribute.get(
"IdGrupoControl"
),
Requirements_Attributes_Marco=attribute.get("Marco"),
Requirements_Attributes_Categoria=attribute.get("Categoria"),
Requirements_Attributes_DescripcionControl=attribute.get(
"DescripcionControl"
),
Requirements_Attributes_Nivel=attribute.get("Nivel"),
Requirements_Attributes_Tipo=attribute.get("Tipo"),
Requirements_Attributes_Dimensiones=",".join(
attribute.get("Dimensiones")
),
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022)
elif compliance.Framework == "CIS-AWS" and "cis" in str(
output_options.output_modes
):
# Only with the version of CIS that was selected
if "cis_" + compliance.Version + "_aws" in str(output_options.output_modes):
compliance_output = "cis_" + compliance.Version + "_aws"
try:
# We have to retrieve all the check's compliance requirements
check_compliance = output_options.bulk_checks_metadata[
finding.check_metadata.CheckID
].Compliance
for compliance in check_compliance:
csv_header = compliance_row = compliance_output = None
if (
compliance.Framework == "ENS"
and compliance.Version == "RD2022"
and "ens_rd2022_aws" in output_options.output_modes
):
compliance_output = "ens_rd2022_aws"
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_CIS(
compliance_row = Check_Output_CSV_ENS_RD2022(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=audit_info.audited_account,
Region=finding.region,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Section=attribute.get("Section"),
Requirements_Attributes_Profile=attribute.get("Profile"),
Requirements_Attributes_AssessmentStatus=attribute.get(
"AssessmentStatus"
),
Requirements_Attributes_Description=attribute.get(
"Description"
),
Requirements_Attributes_RationaleStatement=attribute.get(
"RationaleStatement"
),
Requirements_Attributes_ImpactStatement=attribute.get(
"ImpactStatement"
),
Requirements_Attributes_RemediationProcedure=attribute.get(
"RemediationProcedure"
),
Requirements_Attributes_AuditProcedure=attribute.get(
"AuditProcedure"
),
Requirements_Attributes_AdditionalInformation=attribute.get(
"AdditionalInformation"
),
Requirements_Attributes_References=attribute.get(
"References"
Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
Requirements_Attributes_Marco=attribute.Marco,
Requirements_Attributes_Categoria=attribute.Categoria,
Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
Requirements_Attributes_Nivel=attribute.Nivel,
Requirements_Attributes_Tipo=attribute.Tipo,
Requirements_Attributes_Dimensiones=",".join(
attribute.Dimensiones
),
Status=finding.status,
StatusExtended=finding.status_extended,
@@ -107,15 +70,93 @@ def fill_compliance(output_options, finding, audit_info, file_descriptors):
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(Check_Output_CSV_CIS)
csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022)
if compliance_row:
csv_writer = DictWriter(
file_descriptors[compliance_output],
fieldnames=csv_header,
delimiter=";",
)
csv_writer.writerow(compliance_row.__dict__)
elif compliance.Framework == "CIS" and "cis_" in str(
output_options.output_modes
):
# Only with the version of CIS that was selected
if "cis_" + compliance.Version + "_aws" in str(
output_options.output_modes
):
compliance_output = "cis_" + compliance.Version + "_aws"
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_CIS(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=audit_info.audited_account,
Region=finding.region,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_Profile=attribute.Profile,
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
Requirements_Attributes_Description=attribute.Description,
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_References=attribute.References,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(Check_Output_CSV_CIS)
else:
compliance_output = compliance.Framework
if compliance.Version != "":
compliance_output += "_" + compliance.Version
if compliance.Provider != "":
compliance_output += "_" + compliance.Provider
compliance_output = compliance_output.lower().replace("-", "_")
if compliance_output in output_options.output_modes:
for requirement in compliance.Requirements:
requirement_description = requirement.Description
requirement_id = requirement.Id
for attribute in requirement.Attributes:
compliance_row = Check_Output_CSV_Generic_Compliance(
Provider=finding.check_metadata.Provider,
Description=compliance.Description,
AccountId=audit_info.audited_account,
Region=finding.region,
AssessmentDate=timestamp.isoformat(),
Requirements_Id=requirement_id,
Requirements_Description=requirement_description,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_SubGroup=attribute.SubGroup,
Requirements_Attributes_Service=attribute.Service,
Requirements_Attributes_Soc_Type=attribute.Soc_Type,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,
CheckId=finding.check_metadata.CheckID,
)
csv_header = generate_csv_fields(
Check_Output_CSV_Generic_Compliance
)
if compliance_row:
csv_writer = DictWriter(
file_descriptors[compliance_output],
fieldnames=csv_header,
delimiter=";",
)
csv_writer.writerow(compliance_row.__dict__)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def display_compliance_table(
@@ -126,16 +167,16 @@ def display_compliance_table(
output_directory: str,
):
try:
if "ens_rd2022_aws" in compliance_framework:
if "ens_rd2022_aws" == compliance_framework:
marcos = {}
ens_compliance_table = {
"Proveedor": [],
"Marco/Categoria": [],
"Estado": [],
"PYTEC": [],
"Alto": [],
"Medio": [],
"Bajo": [],
"Opcional": [],
}
pass_count = fail_count = 0
for finding in findings:
@@ -153,13 +194,13 @@ def display_compliance_table(
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
marco_categoria = (
f"{attribute['Marco']}/{attribute['Categoria']}"
f"{attribute.Marco}/{attribute.Categoria}"
)
# Check if Marco/Categoria exists
if marco_categoria not in marcos:
marcos[marco_categoria] = {
"Estado": f"{Fore.GREEN}CUMPLE{Style.RESET_ALL}",
"Pytec": 0,
"Opcional": 0,
"Alto": 0,
"Medio": 0,
"Bajo": 0,
@@ -171,13 +212,13 @@ def display_compliance_table(
] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}"
elif finding.status == "PASS":
pass_count += 1
if attribute["Nivel"] == "pytec":
marcos[marco_categoria]["Pytec"] += 1
elif attribute["Nivel"] == "alto":
if attribute.Nivel == "opcional":
marcos[marco_categoria]["Opcional"] += 1
elif attribute.Nivel == "alto":
marcos[marco_categoria]["Alto"] += 1
elif attribute["Nivel"] == "medio":
elif attribute.Nivel == "medio":
marcos[marco_categoria]["Medio"] += 1
elif attribute["Nivel"] == "bajo":
elif attribute.Nivel == "bajo":
marcos[marco_categoria]["Bajo"] += 1
# Add results to table
@@ -185,17 +226,17 @@ def display_compliance_table(
ens_compliance_table["Proveedor"].append("aws")
ens_compliance_table["Marco/Categoria"].append(marco)
ens_compliance_table["Estado"].append(marcos[marco]["Estado"])
ens_compliance_table["PYTEC"].append(
f"{Fore.LIGHTRED_EX}{marcos[marco]['Pytec']}{Style.RESET_ALL}"
ens_compliance_table["Opcional"].append(
f"{Fore.BLUE}{marcos[marco]['Opcional']}{Style.RESET_ALL}"
)
ens_compliance_table["Alto"].append(
f"{Fore.RED}{marcos[marco]['Alto']}{Style.RESET_ALL}"
f"{Fore.LIGHTRED_EX}{marcos[marco]['Alto']}{Style.RESET_ALL}"
)
ens_compliance_table["Medio"].append(
f"{Fore.YELLOW}{marcos[marco]['Medio']}{Style.RESET_ALL}"
f"{orange_color}{marcos[marco]['Medio']}{Style.RESET_ALL}"
)
ens_compliance_table["Bajo"].append(
f"{Fore.BLUE}{marcos[marco]['Bajo']}{Style.RESET_ALL}"
f"{Fore.YELLOW}{marcos[marco]['Bajo']}{Style.RESET_ALL}"
)
if fail_count + pass_count < 0:
print(
@@ -223,11 +264,11 @@ def display_compliance_table(
print(
f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}"
)
print("\nResultados detallados en:")
print(f"\nResultados detallados de {compliance_fm} en:")
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
if "cis" in str(compliance_framework):
elif "cis_1." in compliance_framework:
sections = {}
cis_compliance_table = {
"Provider": [],
@@ -240,14 +281,15 @@ def display_compliance_table(
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if compliance.Framework == "CIS-AWS" and compliance.Version in str(
compliance_framework
if (
compliance.Framework == "CIS"
and compliance.Version in compliance_framework
):
compliance_version = compliance.Version
compliance_fm = compliance.Framework
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
section = attribute["Section"]
section = attribute.Section
# Check if Section exists
if section not in sections:
sections[section] = {
@@ -259,12 +301,12 @@ def display_compliance_table(
fail_count += 1
elif finding.status == "PASS":
pass_count += 1
if attribute["Profile"] == "Level 1":
if attribute.Profile == "Level 1":
if finding.status == "FAIL":
sections[section]["Level 1"]["FAIL"] += 1
else:
sections[section]["Level 1"]["PASS"] += 1
elif attribute["Profile"] == "Level 2":
elif attribute.Profile == "Level 2":
if finding.status == "FAIL":
sections[section]["Level 2"]["FAIL"] += 1
else:
@@ -291,7 +333,7 @@ def display_compliance_table(
cis_compliance_table["Level 2"].append(
f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}"
)
if fail_count + pass_count < 0:
if fail_count + pass_count < 1:
print(
f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL}.\n"
)
@@ -317,10 +359,15 @@ def display_compliance_table(
print(
f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}"
)
print("\nDetailed Results in:")
print(f"\nDetailed results of {compliance_fm} are in:")
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
else:
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"

View File

@@ -15,6 +15,7 @@ from prowler.lib.outputs.models import (
Azure_Check_Output_CSV,
Check_Output_CSV_CIS,
Check_Output_CSV_ENS_RD2022,
Check_Output_CSV_Generic_Compliance,
generate_csv_fields,
)
from prowler.lib.utils.utils import file_exists, open_file
@@ -41,18 +42,17 @@ def initialize_file_descriptor(
"a",
)
if output_mode in ("csv", "ens_rd2022_aws", "cis_1.5_aws", "cis_1.4_aws"):
if output_mode in ("json", "json-asff"):
file_descriptor.write("[")
elif "html" in output_mode:
add_html_header(file_descriptor, audit_info)
else:
# Format is the class model of the CSV format to print the headers
csv_header = [x.upper() for x in generate_csv_fields(format)]
csv_writer = DictWriter(
file_descriptor, fieldnames=csv_header, delimiter=";"
)
csv_writer.writeheader()
if output_mode in ("json", "json-asff"):
file_descriptor.write("[")
if "html" in output_mode:
add_html_header(file_descriptor, audit_info)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -84,15 +84,14 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "json":
elif output_mode == "json":
filename = f"{output_directory}/{output_filename}{json_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
if isinstance(audit_info, AWS_Audit_Info):
elif isinstance(audit_info, AWS_Audit_Info):
if output_mode == "json-asff":
filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
file_descriptor = initialize_file_descriptor(
@@ -100,7 +99,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "html":
elif output_mode == "html":
filename = (
f"{output_directory}/{output_filename}{html_file_suffix}"
)
@@ -109,7 +108,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "ens_rd2022_aws":
elif output_mode == "ens_rd2022_aws":
filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
@@ -119,19 +118,31 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "cis_1.5_aws":
elif output_mode == "cis_1.5_aws":
filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
if output_mode == "cis_1.4_aws":
elif output_mode == "cis_1.4_aws":
filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info, Check_Output_CSV_CIS
)
file_descriptors.update({output_mode: file_descriptor})
else:
# Generic Compliance framework
filename = f"{output_directory}/{output_filename}_{output_mode}{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_CSV_Generic_Compliance,
)
file_descriptors.update({output_mode: file_descriptor})
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -31,7 +31,7 @@ def fill_json_asff(finding_output, audit_info, finding):
) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
finding_output.Severity = Severity(Label=finding.check_metadata.Severity.upper())
finding_output.Title = finding.check_metadata.CheckTitle
finding_output.Description = finding.check_metadata.Description
finding_output.Description = finding.status_extended
finding_output.Resources = [
Resource(
Id=finding.resource_arn,

View File

@@ -318,6 +318,7 @@ class Check_Output_CSV_ENS_RD2022(BaseModel):
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
@@ -338,10 +339,11 @@ class Check_Output_CSV_ENS_RD2022(BaseModel):
class Check_Output_CSV_CIS(BaseModel):
"""
Check_Output_CSV_ENS_RD2022 generates a finding's output in CSV CIS format.
Check_Output_CSV_CIS generates a finding's output in CSV CIS format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
@@ -363,6 +365,29 @@ class Check_Output_CSV_CIS(BaseModel):
CheckId: str
class Check_Output_CSV_Generic_Compliance(BaseModel):
"""
Check_Output_CSV_Generic_Compliance generates a finding's output in CSV Generic Compliance format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Section: Optional[str]
Requirements_Attributes_SubSection: Optional[str]
Requirements_Attributes_SubGroup: Optional[str]
Requirements_Attributes_Service: str
Requirements_Attributes_Soc_Type: Optional[str]
Status: str
StatusExtended: str
ResourceId: str
CheckId: str
# JSON ASFF Output
class ProductFields(BaseModel):
ProviderName: str = "Prowler"

View File

@@ -4,6 +4,7 @@ import sys
from colorama import Fore, Style
from prowler.config.config import (
available_compliance_frameworks,
csv_file_suffix,
html_file_suffix,
json_asff_file_suffix,
@@ -11,7 +12,7 @@ from prowler.config.config import (
orange_color,
)
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance import fill_compliance
from prowler.lib.outputs.compliance import add_manual_controls, fill_compliance
from prowler.lib.outputs.file_descriptors import fill_file_descriptors
from prowler.lib.outputs.html import fill_html
from prowler.lib.outputs.json import fill_json_asff
@@ -82,9 +83,9 @@ def report(check_findings, output_options, audit_info):
if not (finding.status != "FAIL" and output_options.is_quiet):
# AWS specific outputs
if finding.check_metadata.Provider == "aws":
if (
"ens_rd2022_aws" in output_options.output_modes
or "cis" in str(output_options.output_modes)
if any(
compliance in output_options.output_modes
for compliance in available_compliance_frameworks
):
fill_compliance(
output_options,
@@ -93,6 +94,12 @@ def report(check_findings, output_options, audit_info):
file_descriptors,
)
add_manual_controls(
output_options,
audit_info,
file_descriptors,
)
if "html" in file_descriptors:
fill_html(file_descriptors["html"], finding)
file_descriptors["html"].write("")

View File

@@ -130,7 +130,7 @@ def generate_regional_clients(
regions = regions[:1]
for region in regions:
regional_client = audit_info.audit_session.client(
service, region_name=region
service, region_name=region, config=audit_info.session_config
)
regional_client.region = region
regional_clients[region] = regional_client

View File

@@ -578,13 +578,14 @@
"us-west-2",
"af-south-1",
"ap-northeast-2",
"ap-southeast-3",
"eu-central-1",
"eu-south-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"me-south-1",
"eu-north-1",
"eu-west-3",
"sa-east-1",
"us-east-2",
"us-west-1"
@@ -1629,6 +1630,13 @@
]
}
},
"cloudtrail-data": {
"regions": {
"aws": [],
"aws-cn": [],
"aws-us-gov": []
}
},
"cloudwatch": {
"regions": {
"aws": [
@@ -2179,9 +2187,27 @@
"aws-us-gov": []
}
},
"connectcampaigns": {
"regions": {
"aws": [
"ap-southeast-2",
"ca-central-1",
"eu-west-2",
"us-east-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"connectcases": {
"regions": {
"aws": [
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-west-2",
"us-east-1",
"us-west-2"
],
@@ -2558,17 +2584,17 @@
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-3",
"ap-southeast-1",
"ca-central-1",
"eu-south-2",
"eu-west-1",
"eu-west-3",
"us-east-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-4",
"eu-west-2",
"me-south-1",
"eu-central-2",
"sa-east-1",
"us-east-2",
"us-west-2",
@@ -2577,7 +2603,9 @@
"eu-central-1",
"eu-north-1",
"eu-south-1",
"eu-west-2",
"me-central-1",
"me-south-1",
"us-west-1"
],
"aws-cn": [
@@ -2989,6 +3017,7 @@
"regions": {
"aws": [
"ap-south-1",
"ap-south-2",
"ca-central-1",
"eu-west-1",
"eu-west-2",
@@ -2998,7 +3027,6 @@
"af-south-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"eu-central-2",
"eu-south-1",
"eu-south-2",
@@ -3006,6 +3034,7 @@
"me-central-1",
"sa-east-1",
"ap-east-1",
"ap-northeast-3",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
@@ -3015,8 +3044,8 @@
"us-west-2"
],
"aws-cn": [
"cn-north-1",
"cn-northwest-1"
"cn-northwest-1",
"cn-north-1"
],
"aws-us-gov": [
"us-gov-east-1",
@@ -3305,20 +3334,23 @@
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-west-2",
"us-west-1"
"us-west-1",
"us-west-2"
],
"aws-cn": [
"cn-northwest-1",
"cn-north-1"
],
"aws-us-gov": []
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"emr-serverless": {
@@ -4132,27 +4164,29 @@
"aws": [
"af-south-1",
"ap-northeast-3",
"ap-southeast-1",
"ap-southeast-3",
"eu-central-1",
"eu-south-2",
"eu-west-2",
"sa-east-1",
"us-east-1",
"us-west-2",
"ap-east-1",
"ap-northeast-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-south-1",
"eu-west-1",
"eu-west-3",
"me-central-1",
"me-south-1",
"us-east-2",
"ap-northeast-2",
"ap-south-1",
"eu-central-2",
"eu-north-1",
"eu-west-3",
"me-central-1",
"us-west-1"
],
"aws-cn": [
@@ -4849,6 +4883,7 @@
"kendra": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
@@ -4867,6 +4902,8 @@
"kendra-ranking": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
@@ -4937,14 +4974,15 @@
"eu-north-1",
"eu-south-1",
"eu-west-1",
"eu-west-3",
"me-central-1",
"us-east-1",
"us-west-1",
"af-south-1",
"ap-northeast-3",
"ap-south-1",
"eu-central-1",
"eu-west-2"
"eu-west-2",
"eu-west-3"
],
"aws-cn": [
"cn-north-1",
@@ -5027,8 +5065,8 @@
"regions": {
"aws": [
"af-south-1",
"ap-northeast-2",
"ap-southeast-1",
"ap-southeast-3",
"ca-central-1",
"eu-central-1",
"eu-north-1",
@@ -5037,14 +5075,15 @@
"us-west-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"me-south-1",
"us-west-2",
"ap-south-1",
"ap-southeast-2",
"eu-west-2",
"sa-east-1",
"us-east-2"
],
@@ -6807,13 +6846,14 @@
"us-west-1",
"ap-east-1",
"ap-northeast-1",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-west-2",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-west-2"
"us-west-2",
"eu-central-1"
],
"aws-cn": [
"cn-north-1",
@@ -6994,13 +7034,15 @@
"ap-east-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-west-3",
"me-central-1",
"sa-east-1",
"us-west-1",
"ap-northeast-1",
"ap-south-1",
"eu-central-1",
"eu-south-2",
"eu-west-2",
"us-east-1",
"us-east-2"
@@ -7561,12 +7603,12 @@
"ca-central-1",
"eu-west-2",
"me-central-1",
"us-west-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-southeast-2",
"eu-north-1",
"sa-east-1",
"us-west-1",
"us-west-2"
],
"aws-cn": [
@@ -7574,7 +7616,8 @@
"cn-north-1"
],
"aws-us-gov": [
"us-gov-west-1"
"us-gov-west-1",
"us-gov-east-1"
]
}
},
@@ -7798,28 +7841,31 @@
"ap-southeast-2",
"ap-southeast-3",
"ca-central-1",
"eu-north-1",
"eu-west-1",
"eu-west-2",
"us-east-2",
"ap-east-1",
"ap-northeast-3",
"eu-central-1",
"eu-north-1",
"me-south-1",
"sa-east-1",
"us-east-1",
"us-west-1",
"us-west-2",
"af-south-1",
"ap-southeast-1",
"eu-south-1",
"eu-west-3",
"me-central-1"
"me-central-1",
"us-east-1"
],
"aws-cn": [
"cn-northwest-1",
"cn-north-1"
],
"aws-cn": [],
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
"us-gov-west-1",
"us-gov-east-1"
]
}
},
@@ -8026,6 +8072,22 @@
"aws-us-gov": []
}
},
"simpledb": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-southeast-1",
"ap-southeast-2",
"eu-west-1",
"sa-east-1",
"us-east-1",
"us-west-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"simspaceweaver": {
"regions": {
"aws": [

View File

@@ -1,4 +1,5 @@
from boto3 import session
from botocore.config import Config
from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
@@ -9,6 +10,9 @@ current_audit_info = AWS_Audit_Info(
profile_name=None,
botocore_session=None,
),
# Default standard retrier config
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
session_config=Config(retries={"max_attempts": 3, "mode": "standard"}),
audited_account=None,
audited_user_id=None,
audited_partition=None,

View File

@@ -3,6 +3,7 @@ from datetime import datetime
from typing import Any, Optional
from boto3 import session
from botocore.config import Config
@dataclass
@@ -33,6 +34,8 @@ class AWS_Organizations_Info:
class AWS_Audit_Info:
original_session: session.Session
audit_session: session.Session
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
session_config: Config
audited_account: int
audited_identity_arn: str
audited_user_id: str

View File

@@ -50,7 +50,6 @@ def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
or region == "us-gov-west-1"
or region == "cn-north-1"
):
get_roles_paginator = iam_client.get_paginator("list_roles")
for page in get_roles_paginator.paginate():
for role in page["Roles"]:
@@ -117,7 +116,6 @@ def quick_inventory(audit_info: AWS_Audit_Info, output_directory: str):
def create_inventory_table(resources: list) -> dict:
services = {}
# { "S3":
# 123,
@@ -143,6 +141,14 @@ def create_inventory_table(resources: list) -> dict:
resource_type = "topic"
elif service == "sqs":
resource_type = "queue"
elif service == "apigateway":
split_parts = resource.split(":")[5].split("/")
if "integration" in split_parts and "responses" in split_parts:
resource_type = "restapis-resources-methods-integration-response"
elif "documentation" in split_parts and "parts" in split_parts:
resource_type = "restapis-documentation-parts"
else:
resource_type = resource.split(":")[5].split("/")[1]
else:
resource_type = resource.split(":")[5].split("/")[0]
if service not in resources_type:
@@ -171,7 +177,6 @@ def create_inventory_table(resources: list) -> dict:
def create_output(resources: list, audit_info: AWS_Audit_Info, output_directory: str):
json_output = []
output_file = f"{output_directory}/prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"

View File

@@ -0,0 +1,36 @@
{
"Provider": "aws",
"CheckID": "accessanalyzer_enabled",
"CheckTitle": "Check if IAM Access Analyzer is enabled",
"CheckType": [
"IAM"
],
"ServiceName": "accessanalyzer",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:access-analyzer:region:account-id:analyzer/resource-id",
"Severity": "low",
"ResourceType": "Other",
"Description": "Check if IAM Access Analyzer is enabled",
"Risk": "AWS IAM Access Analyzer helps you identify the resources in your organization and accounts, such as Amazon S3 buckets or IAM roles, that are shared with an external entity. This lets you identify unintended access to your resources and data, which is a security risk. IAM Access Analyzer uses a form of mathematical analysis called automated reasoning, which applies logic and mathematical inference to determine all possible access paths allowed by a resource policy.",
"RelatedUrl": "https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html",
"Remediation": {
"Code": {
"CLI": "aws accessanalyzer create-analyzer --analyzer-name <NAME> --type <ACCOUNT|ORGANIZATION>",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Enable IAM Access Analyzer for all accounts, create analyzer and take action over it is recommendations (IAM Access Analyzer is available at no additional cost).",
"Url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html"
}
},
"Categories": [],
"Tags": {
"Tag1Key": "value",
"Tag2Key": "value"
},
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,36 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_client import (
accessanalyzer_client,
)
class accessanalyzer_enabled(Check):
    """Check that IAM Access Analyzer is enabled (one finding per analyzer/region)."""

    def execute(self):
        """Build a PASS/FAIL report for every analyzer the service client collected."""
        findings = []
        for analyzer in accessanalyzer_client.analyzers:
            report = Check_Report_AWS(self.metadata())
            report.region = analyzer.region
            report.resource_id = analyzer.name
            if analyzer.status == "NOT_AVAILABLE":
                # No analyzer resource exists in this account/region, so there
                # is no ARN to report — only the account identifier in `name`.
                report.status = "FAIL"
                report.status_extended = (
                    f"IAM Access Analyzer in account {analyzer.name} is not enabled"
                )
            else:
                # An analyzer resource exists; expose its ARN either way.
                report.resource_arn = analyzer.arn
                if analyzer.status == "ACTIVE":
                    report.status = "PASS"
                    report.status_extended = (
                        f"IAM Access Analyzer {analyzer.name} is enabled"
                    )
                else:
                    report.status = "FAIL"
                    report.status_extended = (
                        f"IAM Access Analyzer {analyzer.name} is not active"
                    )
            findings.append(report)
        return findings

View File

@@ -31,7 +31,7 @@ class accessanalyzer_enabled_without_findings(Check):
elif analyzer.status == "NOT_AVAILABLE":
report.status = "FAIL"
report.status_extended = (
f"IAM Access Analyzer {analyzer.name} is not enabled"
f"IAM Access Analyzer in account {analyzer.name} is not enabled"
)
report.resource_id = analyzer.name
else:

View File

@@ -75,7 +75,7 @@ class AccessAnalyzer:
logger.info("AccessAnalyzer - Get Finding status...")
try:
for analyzer in self.analyzers:
if analyzer.status != "NOT_AVAILABLE":
if analyzer.status == "ACTIVE":
regional_client = self.regional_clients[analyzer.region]
for finding in analyzer.findings:
finding_information = regional_client.get_finding(
@@ -92,7 +92,7 @@ class AccessAnalyzer:
logger.info("AccessAnalyzer - Listing Findings per Analyzer...")
try:
for analyzer in self.analyzers:
if analyzer.status != "NOT_AVAILABLE":
if analyzer.status == "ACTIVE":
regional_client = self.regional_clients[analyzer.region]
list_findings_paginator = regional_client.get_paginator(
"list_findings"

View File

@@ -1,7 +1,7 @@
{
"Provider": "aws",
"CheckID": "appstream_fleet_session_disconnect_timeout",
"CheckTitle": "Ensure session disconnect timeout is set to 5 minutes or lesss.",
"CheckTitle": "Ensure session disconnect timeout is set to 5 minutes or less.",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",

View File

@@ -22,6 +22,7 @@ class appstream_fleet_session_idle_disconnect_timeout(Check):
if (
fleet.idle_disconnect_timeout_in_seconds
and fleet.idle_disconnect_timeout_in_seconds
<= max_idle_disconnect_timeout_in_seconds
):
report.status = "PASS"

View File

@@ -1,5 +1,7 @@
import threading
from dataclasses import dataclass
from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
@@ -48,9 +50,9 @@ class AppStream:
disconnect_timeout_in_seconds=fleet[
"DisconnectTimeoutInSeconds"
],
idle_disconnect_timeout_in_seconds=fleet[
idle_disconnect_timeout_in_seconds=fleet.get(
"IdleDisconnectTimeoutInSeconds"
],
),
enable_default_internet_access=fleet[
"EnableDefaultInternetAccess"
],
@@ -64,29 +66,11 @@ class AppStream:
)
@dataclass
class Fleet:
class Fleet(BaseModel):
arn: str
name: str
max_user_duration_in_seconds: int
disconnect_timeout_in_seconds: int
idle_disconnect_timeout_in_seconds: int
idle_disconnect_timeout_in_seconds: Optional[int]
enable_default_internet_access: bool
def __init__(
self,
arn,
name,
max_user_duration_in_seconds,
disconnect_timeout_in_seconds,
idle_disconnect_timeout_in_seconds,
enable_default_internet_access,
region,
):
self.arn = arn
self.name = name
self.max_user_duration_in_seconds = max_user_duration_in_seconds
self.disconnect_timeout_in_seconds = disconnect_timeout_in_seconds
self.idle_disconnect_timeout_in_seconds = idle_disconnect_timeout_in_seconds
self.enable_default_internet_access = enable_default_internet_access
self.region = region
region: str

View File

@@ -12,31 +12,55 @@ class awslambda_function_no_secrets_in_code(Check):
def execute(self):
findings = []
for function in awslambda_client.functions.values():
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn
if function.code:
report = Check_Report_AWS(self.metadata())
report.region = function.region
report.resource_id = function.name
report.resource_arn = function.arn
report.status = "PASS"
report.status_extended = (
f"No secrets found in Lambda function {function.name} code"
)
report.status = "PASS"
report.status_extended = (
f"No secrets found in Lambda function {function.name} code"
)
with tempfile.TemporaryDirectory() as tmp_dir_name:
function.code.code_zip.extractall(tmp_dir_name)
# List all files
files_in_zip = next(os.walk(tmp_dir_name))[2]
secrets_findings = []
for file in files_in_zip:
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(f"{tmp_dir_name}/{file}")
detect_secrets_output = secrets.json()
if detect_secrets_output:
for (
file_name
) in (
detect_secrets_output.keys()
): # Appears that only 1 file is being scanned at a time, so could rework this
output_file_name = file_name.replace(
f"{tmp_dir_name}/", ""
)
secrets_string = ", ".join(
[
f"{secret['type']} on line {secret['line_number']}"
for secret in detect_secrets_output[file_name]
]
)
secrets_findings.append(
f"{output_file_name}: {secrets_string}"
)
with tempfile.TemporaryDirectory() as tmp_dir_name:
function.code.code_zip.extractall(tmp_dir_name)
# List all files
files_in_zip = next(os.walk(tmp_dir_name))[2]
for file in files_in_zip:
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(f"{tmp_dir_name}/{file}")
if secrets.json():
if secrets_findings:
final_output_string = "; ".join(secrets_findings)
report.status = "FAIL"
report.status_extended = f"Potential secret found in Lambda function {function.name} code"
break
# report.status_extended = f"Potential {'secrets' if len(secrets_findings)>1 else 'secret'} found in Lambda function {function.name} code. {final_output_string}"
if len(secrets_findings) > 1:
report.status_extended = f"Potential secrets found in Lambda function {function.name} code -> {final_output_string}"
else:
report.status_extended = f"Potential secret found in Lambda function {function.name} code -> {final_output_string}"
# break // Don't break as there may be additional findings
findings.append(report)
findings.append(report)
return findings

View File

@@ -27,7 +27,8 @@ class awslambda_function_no_secrets_in_variables(Check):
temp_env_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_env_data_file.write(
bytes(
json.dumps(function.environment), encoding="raw_unicode_escape"
json.dumps(function.environment, indent=2),
encoding="raw_unicode_escape",
)
)
temp_env_data_file.close()
@@ -35,9 +36,17 @@ class awslambda_function_no_secrets_in_variables(Check):
with default_settings():
secrets.scan_file(temp_env_data_file.name)
if secrets.json():
detect_secrets_output = secrets.json()
if detect_secrets_output:
environment_variable_names = list(function.environment.keys())
secrets_string = ", ".join(
[
f"{secret['type']} in variable {environment_variable_names[int(secret['line_number'])-2]}"
for secret in detect_secrets_output[temp_env_data_file.name]
]
)
report.status = "FAIL"
report.status_extended = f"Potential secret found in Lambda function {function.name} variables"
report.status_extended = f"Potential secret found in Lambda function {function.name} variables -> {secrets_string}"
os.remove(temp_env_data_file.name)

View File

@@ -69,7 +69,9 @@ class Lambda:
if "Runtime" in function:
self.functions[lambda_name].runtime = function["Runtime"]
if "Environment" in function:
lambda_environment = function["Environment"]["Variables"]
lambda_environment = function["Environment"].get(
"Variables"
)
self.functions[lambda_name].environment = lambda_environment
except Exception as error:
@@ -87,12 +89,13 @@ class Lambda:
function_information = regional_client.get_function(
FunctionName=function.name
)
code_location_uri = function_information["Code"]["Location"]
raw_code_zip = requests.get(code_location_uri).content
self.functions[function.name].code = LambdaCode(
location=code_location_uri,
code_zip=zipfile.ZipFile(io.BytesIO(raw_code_zip)),
)
if "Location" in function_information["Code"]:
code_location_uri = function_information["Code"]["Location"]
raw_code_zip = requests.get(code_location_uri).content
self.functions[function.name].code = LambdaCode(
location=code_location_uri,
code_zip=zipfile.ZipFile(io.BytesIO(raw_code_zip)),
)
except Exception as error:
logger.error(

View File

@@ -13,7 +13,7 @@
"RelatedUrl": "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-protect-stacks.html",
"Remediation": {
"Code": {
"CLI": "aws cloudformation update-termination-protection --region us-east-1 --stack-name <STACK_NAME> --enable-termination-protection",
"CLI": "aws cloudformation update-termination-protection --region <REGION_NAME> --stack-name <STACK_NAME> --enable-termination-protection",
"NativeIaC": "",
"Other": "",
"Terraform": ""

View File

@@ -20,8 +20,8 @@ class CloudFront:
if global_client:
self.client = list(global_client.values())[0]
self.region = self.client.region
self.distributions = self.__list_distributions__(self.client, self.region)
self.distributions = self.__get_distribution_config__(
self.__list_distributions__(self.client, self.region)
self.__get_distribution_config__(
self.client, self.distributions, self.region
)
@@ -30,7 +30,6 @@ class CloudFront:
def __list_distributions__(self, client, region) -> dict:
logger.info("CloudFront - Listing Distributions...")
distributions = {}
try:
list_ditributions_paginator = client.get_paginator("list_distributions")
for page in list_ditributions_paginator.paginate():
@@ -48,9 +47,7 @@ class CloudFront:
origins=origins,
region=region,
)
distributions[distribution_id] = distribution
return distributions
self.distributions[distribution_id] = distribution
except Exception as error:
logger.error(
@@ -99,8 +96,6 @@ class CloudFront:
logger.error(
f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
finally:
return distributions
class OriginsSSLProtocols(Enum):

View File

@@ -10,6 +10,7 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
@@ -21,13 +22,19 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
else:
report.status_extended = f"Single region Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}"
for bucket in s3_client.buckets:
if trail_bucket == bucket.name and bucket.logging:
report.status = "PASS"
if trail.is_multiregion:
report.status_extended = f"Multiregion trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
else:
report.status_extended = f"Single region trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.logging:
report.status = "PASS"
if trail.is_multiregion:
report.status_extended = f"Multiregion trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
else:
report.status_extended = f"Single region trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}"
break
# check if trail is delivering logs in a cross account bucket
if not trail_bucket_is_in_account:
report.status_extended = f"Trail {trail.name} is delivering logs in a cross-account bucket {trail_bucket} in another account out of Prowler's permissions scope, please check it manually"
findings.append(report)
return findings

View File

@@ -10,6 +10,7 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
findings = []
for trail in cloudtrail_client.trails:
if trail.name:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
@@ -23,19 +24,23 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
for bucket in s3_client.buckets:
# Here we need to ensure that acl_grantee is filled since if we don't have permissions to query the api for a concrete region
# (for example due to a SCP) we are going to try access an attribute from a None type
if trail_bucket == bucket.name and bucket.acl_grantees:
for grant in bucket.acl_grantees:
if (
grant.URI
== "http://acs.amazonaws.com/groups/global/AllUsers"
):
report.status = "FAIL"
if trail.is_multiregion:
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible"
else:
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible"
break
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.acl_grantees:
for grant in bucket.acl_grantees:
if (
grant.URI
== "http://acs.amazonaws.com/groups/global/AllUsers"
):
report.status = "FAIL"
if trail.is_multiregion:
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible"
else:
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible"
break
# check if trail bucket is a cross account bucket
if not trail_bucket_is_in_account:
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually"
findings.append(report)
return findings

View File

@@ -7,44 +7,33 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
class cloudtrail_multi_region_enabled(Check):
def execute(self):
findings = []
actual_region = None
for trail in cloudtrail_client.trails:
for region in cloudtrail_client.regional_clients.keys():
report = Check_Report_AWS(self.metadata())
report.region = trail.region
if trail.name: # Check if there are trails in region
# Check if region has changed and add report of previous region
if actual_region != trail.region:
if report: # Check if it not the beginning
findings.append(report)
trail_in_region = False
if not trail_in_region:
report.region = region
for trail in cloudtrail_client.trails:
if trail.region == region:
if trail.is_logging:
report.status = "PASS"
report.resource_id = trail.name
report.resource_arn = trail.arn
if trail.is_multiregion:
report.status_extended = (
f"Trail {trail.name} is multiregion and it is logging"
)
else:
report.status_extended = f"Trail {trail.name} is not multiregion and it is logging"
report.resource_id = trail.name
report.resource_arn = trail.arn
trail_in_region = True # Trail enabled in region
# Since there exists a logging trail in that region there is no point in checking the remaining trails
# Store the finding and exit the loop
findings.append(report)
break
else:
report.status = "FAIL"
report.status_extended = (
"No CloudTrail trails enabled and logging were found"
)
report.region = cloudtrail_client.region
report.resource_arn = "No trails"
report.resource_id = "No trails"
actual_region = trail.region
else:
report.status = "FAIL"
report.status_extended = (
"No CloudTrail trails enabled and logging were found"
)
report.resource_arn = "No trails"
report.resource_id = "No trails"
# If there are no trails logging it is needed to store the FAIL once all the trails have been checked
if report.status == "FAIL":
findings.append(report)
return findings

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_network_acls_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateNetworkAcl.+\$\.eventName\s*=\s*CreateNetworkAclEntry.+\$\.eventName\s*=\s*DeleteNetworkAcl.+\$\.eventName\s*=\s*DeleteNetworkAclEntry.+\$\.eventName\s*=\s*ReplaceNetworkAclEntry.+\$\.eventName\s*=\s*ReplaceNetworkAclAssociation"
pattern = r"\$\.eventName\s*=\s*.?CreateNetworkAcl.+\$\.eventName\s*=\s*.?CreateNetworkAclEntry.+\$\.eventName\s*=\s*.?DeleteNetworkAcl.+\$\.eventName\s*=\s*.?DeleteNetworkAclEntry.+\$\.eventName\s*=\s*.?ReplaceNetworkAclEntry.+\$\.eventName\s*=\s*.?ReplaceNetworkAclAssociation.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_network_gateways_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateCustomerGateway.+\$\.eventName\s*=\s*DeleteCustomerGateway.+\$\.eventName\s*=\s*AttachInternetGateway.+\$\.eventName\s*=\s*CreateInternetGateway.+\$\.eventName\s*=\s*DeleteInternetGateway.+\$\.eventName\s*=\s*DetachInternetGateway"
pattern = r"\$\.eventName\s*=\s*.?CreateCustomerGateway.+\$\.eventName\s*=\s*.?DeleteCustomerGateway.+\$\.eventName\s*=\s*.?AttachInternetGateway.+\$\.eventName\s*=\s*.?CreateInternetGateway.+\$\.eventName\s*=\s*.?DeleteInternetGateway.+\$\.eventName\s*=\s*.?DetachInternetGateway.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_network_route_tables_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateRoute.+\$\.eventName\s*=\s*CreateRouteTable.+\$\.eventName\s*=\s*ReplaceRoute.+\$\.eventName\s*=\s*ReplaceRouteTableAssociation.+\$\.eventName\s*=\s*DeleteRouteTable.+\$\.eventName\s*=\s*DeleteRoute.+\$\.eventName\s*=\s*DisassociateRouteTable"
pattern = r"\$\.eventName\s*=\s*.?CreateRoute.+\$\.eventName\s*=\s*.?CreateRouteTable.+\$\.eventName\s*=\s*.?ReplaceRoute.+\$\.eventName\s*=\s*.?ReplaceRouteTableAssociation.+\$\.eventName\s*=\s*.?DeleteRouteTable.+\$\.eventName\s*=\s*.?DeleteRoute.+\$\.eventName\s*=\s*.?DisassociateRouteTable.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_changes_to_vpcs_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateVpc.+\$\.eventName\s*=\s*DeleteVpc.+\$\.eventName\s*=\s*ModifyVpcAttribute.+\$\.eventName\s*=\s*AcceptVpcPeeringConnection.+\$\.eventName\s*=\s*CreateVpcPeeringConnection.+\$\.eventName\s*=\s*DeleteVpcPeeringConnection.+\$\.eventName\s*=\s*RejectVpcPeeringConnection.+\$\.eventName\s*=\s*AttachClassicLinkVpc.+\$\.eventName\s*=\s*DetachClassicLinkVpc.+\$\.eventName\s*=\s*DisableVpcClassicLink.+\$\.eventName\s*=\s*EnableVpcClassicLink"
pattern = r"\$\.eventName\s*=\s*.?CreateVpc.+\$\.eventName\s*=\s*.?DeleteVpc.+\$\.eventName\s*=\s*.?ModifyVpcAttribute.+\$\.eventName\s*=\s*.?AcceptVpcPeeringConnection.+\$\.eventName\s*=\s*.?CreateVpcPeeringConnection.+\$\.eventName\s*=\s*.?DeleteVpcPeeringConnection.+\$\.eventName\s*=\s*.?RejectVpcPeeringConnection.+\$\.eventName\s*=\s*.?AttachClassicLinkVpc.+\$\.eventName\s*=\s*.?DetachClassicLinkVpc.+\$\.eventName\s*=\s*.?DisableVpcClassicLink.+\$\.eventName\s*=\s*.?EnableVpcClassicLink.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_change
Check
):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*config.amazonaws.com.+\$\.eventName\s*=\s*StopConfigurationRecorder.+\$\.eventName\s*=\s*DeleteDeliveryChannel.+\$\.eventName\s*=\s*PutDeliveryChannel.+\$\.eventName\s*=\s*PutConfigurationRecorder"
pattern = r"\$\.eventSource\s*=\s*.?config.amazonaws.com.+\$\.eventName\s*=\s*.?StopConfigurationRecorder.+\$\.eventName\s*=\s*.?DeleteDeliveryChannel.+\$\.eventName\s*=\s*.?PutDeliveryChannel.+\$\.eventName\s*=\s*.?PutConfigurationRecorder.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -14,7 +14,7 @@ class cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_change
Check
):
def execute(self):
pattern = r"\$\.eventName\s*=\s*CreateTrail.+\$\.eventName\s*=\s*UpdateTrail.+\$\.eventName\s*=\s*DeleteTrail.+\$\.eventName\s*=\s*StartLogging.+\$\.eventName\s*=\s*StopLogging"
pattern = r"\$\.eventName\s*=\s*.?CreateTrail.+\$\.eventName\s*=\s*.?UpdateTrail.+\$\.eventName\s*=\s*.?DeleteTrail.+\$\.eventName\s*=\s*.?StartLogging.+\$\.eventName\s*=\s*.?StopLogging.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_authentication_failures(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*ConsoleLogin.+\$\.errorMessage\s*=\s*Failed authentication"
pattern = r"\$\.eventName\s*=\s*.?ConsoleLogin.+\$\.errorMessage\s*=\s*.?Failed authentication.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_aws_organizations_changes(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*organizations\.amazonaws\.com.+\$\.eventName\s*=\s*AcceptHandshake.+\$\.eventName\s*=\s*AttachPolicy.+\$\.eventName\s*=\s*CancelHandshake.+\$\.eventName\s*=\s*CreateAccount.+\$\.eventName\s*=\s*CreateOrganization.+\$\.eventName\s*=\s*CreateOrganizationalUnit.+\$\.eventName\s*=\s*CreatePolicy.+\$\.eventName\s*=\s*DeclineHandshake.+\$\.eventName\s*=\s*DeleteOrganization.+\$\.eventName\s*=\s*DeleteOrganizationalUnit.+\$\.eventName\s*=\s*DeletePolicy.+\$\.eventName\s*=\s*EnableAllFeatures.+\$\.eventName\s*=\s*EnablePolicyType.+\$\.eventName\s*=\s*InviteAccountToOrganization.+\$\.eventName\s*=\s*LeaveOrganization.+\$\.eventName\s*=\s*DetachPolicy.+\$\.eventName\s*=\s*DisablePolicyType.+\$\.eventName\s*=\s*MoveAccount.+\$\.eventName\s*=\s*RemoveAccountFromOrganization.+\$\.eventName\s*=\s*UpdateOrganizationalUnit.+\$\.eventName\s*=\s*UpdatePolicy"
pattern = r"\$\.eventSource\s*=\s*.?organizations\.amazonaws\.com.+\$\.eventName\s*=\s*.?AcceptHandshake.+\$\.eventName\s*=\s*.?AttachPolicy.+\$\.eventName\s*=\s*.?CancelHandshake.+\$\.eventName\s*=\s*.?CreateAccount.+\$\.eventName\s*=\s*.?CreateOrganization.+\$\.eventName\s*=\s*.?CreateOrganizationalUnit.+\$\.eventName\s*=\s*.?CreatePolicy.+\$\.eventName\s*=\s*.?DeclineHandshake.+\$\.eventName\s*=\s*.?DeleteOrganization.+\$\.eventName\s*=\s*.?DeleteOrganizationalUnit.+\$\.eventName\s*=\s*.?DeletePolicy.+\$\.eventName\s*=\s*.?EnableAllFeatures.+\$\.eventName\s*=\s*.?EnablePolicyType.+\$\.eventName\s*=\s*.?InviteAccountToOrganization.+\$\.eventName\s*=\s*.?LeaveOrganization.+\$\.eventName\s*=\s*.?DetachPolicy.+\$\.eventName\s*=\s*.?DisablePolicyType.+\$\.eventName\s*=\s*.?MoveAccount.+\$\.eventName\s*=\s*.?RemoveAccountFromOrganization.+\$\.eventName\s*=\s*.?UpdateOrganizationalUnit.+\$\.eventName\s*=\s*.?UpdatePolicy.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*kms.amazonaws.com.+\$\.eventName\s*=\s*DisableKey.+\$\.eventName\s*=\s*ScheduleKeyDeletion"
pattern = r"\$\.eventSource\s*=\s*.?kms.amazonaws.com.+\$\.eventName\s*=\s*.?DisableKey.+\$\.eventName\s*=\s*.?ScheduleKeyDeletion.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_for_s3_bucket_policy_changes(Check):
def execute(self):
pattern = r"\$\.eventSource\s*=\s*s3.amazonaws.com.+\$\.eventName\s*=\s*PutBucketAcl.+\$\.eventName\s*=\s*PutBucketPolicy.+\$\.eventName\s*=\s*PutBucketCors.+\$\.eventName\s*=\s*PutBucketLifecycle.+\$\.eventName\s*=\s*PutBucketReplication.+\$\.eventName\s*=\s*DeleteBucketPolicy.+\$\.eventName\s*=\s*DeleteBucketCors.+\$\.eventName\s*=\s*DeleteBucketLifecycle.+\$\.eventName\s*=\s*DeleteBucketReplication"
pattern = r"\$\.eventSource\s*=\s*.?s3.amazonaws.com.+\$\.eventName\s*=\s*.?PutBucketAcl.+\$\.eventName\s*=\s*.?PutBucketPolicy.+\$\.eventName\s*=\s*.?PutBucketCors.+\$\.eventName\s*=\s*.?PutBucketLifecycle.+\$\.eventName\s*=\s*.?PutBucketReplication.+\$\.eventName\s*=\s*.?DeleteBucketPolicy.+\$\.eventName\s*=\s*.?DeleteBucketCors.+\$\.eventName\s*=\s*.?DeleteBucketLifecycle.+\$\.eventName\s*=\s*.?DeleteBucketReplication.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_policy_changes(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*DeleteGroupPolicy.+\$\.eventName\s*=\s*DeleteRolePolicy.+\$\.eventName\s*=\s*DeleteUserPolicy.+\$\.eventName\s*=\s*PutGroupPolicy.+\$\.eventName\s*=\s*PutRolePolicy.+\$\.eventName\s*=\s*PutUserPolicy.+\$\.eventName\s*=\s*CreatePolicy.+\$\.eventName\s*=\s*DeletePolicy.+\$\.eventName\s*=\s*CreatePolicyVersion.+\$\.eventName\s*=\s*DeletePolicyVersion.+\$\.eventName\s*=\s*AttachRolePolicy.+\$\.eventName\s*=\s*DetachRolePolicy.+\$\.eventName\s*=\s*AttachUserPolicy.+\$\.eventName\s*=\s*DetachUserPolicy.+\$\.eventName\s*=\s*AttachGroupPolicy.+\$\.eventName\s*=\s*DetachGroupPolicy"
pattern = r"\$\.eventName\s*=\s*.?DeleteGroupPolicy.+\$\.eventName\s*=\s*.?DeleteRolePolicy.+\$\.eventName\s*=\s*.?DeleteUserPolicy.+\$\.eventName\s*=\s*.?PutGroupPolicy.+\$\.eventName\s*=\s*.?PutRolePolicy.+\$\.eventName\s*=\s*.?PutUserPolicy.+\$\.eventName\s*=\s*.?CreatePolicy.+\$\.eventName\s*=\s*.?DeletePolicy.+\$\.eventName\s*=\s*.?CreatePolicyVersion.+\$\.eventName\s*=\s*.?DeletePolicyVersion.+\$\.eventName\s*=\s*.?AttachRolePolicy.+\$\.eventName\s*=\s*.?DetachRolePolicy.+\$\.eventName\s*=\s*.?AttachUserPolicy.+\$\.eventName\s*=\s*.?DetachUserPolicy.+\$\.eventName\s*=\s*.?AttachGroupPolicy.+\$\.eventName\s*=\s*.?DetachGroupPolicy.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_root_usage(Check):
def execute(self):
pattern = r"\$\.userIdentity\.type\s*=\s*Root.+\$\.userIdentity\.invokedBy NOT EXISTS.+\$\.eventType\s*!=\s*AwsServiceEvent"
pattern = r"\$\.userIdentity\.type\s*=\s*.?Root.+\$\.userIdentity\.invokedBy NOT EXISTS.+\$\.eventType\s*!=\s*.?AwsServiceEvent.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_security_group_changes(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*AuthorizeSecurityGroupIngress.+\$\.eventName\s*=\s*AuthorizeSecurityGroupEgress.+\$\.eventName\s*=\s*RevokeSecurityGroupIngress.+\$\.eventName\s*=\s*RevokeSecurityGroupEgress.+\$\.eventName\s*=\s*CreateSecurityGroup.+\$\.eventName\s*=\s*DeleteSecurityGroup"
pattern = r"\$\.eventName\s*=\s*.?AuthorizeSecurityGroupIngress.+\$\.eventName\s*=\s*.?AuthorizeSecurityGroupEgress.+\$\.eventName\s*=\s*.?RevokeSecurityGroupIngress.+\$\.eventName\s*=\s*.?RevokeSecurityGroupEgress.+\$\.eventName\s*=\s*.?CreateSecurityGroup.+\$\.eventName\s*=\s*.?DeleteSecurityGroup.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_sign_in_without_mfa(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*ConsoleLogin.+\$\.additionalEventData\.MFAUsed\s*!=\s*Yes"
pattern = r"\$\.eventName\s*=\s*.?ConsoleLogin.+\$\.additionalEventData\.MFAUsed\s*!=\s*.?Yes.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -12,7 +12,7 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
class cloudwatch_log_metric_filter_unauthorized_api_calls(Check):
def execute(self):
pattern = r"\$\.errorCode\s*=\s*\*UnauthorizedOperation.+\$\.errorCode\s*=\s*AccessDenied\*"
pattern = r"\$\.errorCode\s*=\s*.?\*UnauthorizedOperation.+\$\.errorCode\s*=\s*.?AccessDenied\*.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"

View File

@@ -66,9 +66,10 @@ class Codebuild:
if len(ids["ids"]) > 0:
builds = client.batch_get_builds(ids=[ids["ids"][0]])
if "builds" in builds:
project.last_invoked_time = builds["builds"][0][
"endTime"
]
if "endTime" in builds["builds"][0]:
project.last_invoked_time = builds["builds"][0][
"endTime"
]
projects = client.batch_get_projects(names=[project.name])[
"projects"
@@ -86,7 +87,7 @@ class Codebuild:
class CodebuildProject:
name: str
region: str
last_invoked_time: datetime
last_invoked_time: Optional[datetime.datetime]
buildspec: Optional[str]
def __init__(self, name, region, last_invoked_time, buildspec):

View File

@@ -19,8 +19,14 @@ class directoryservice_ldap_certificate_expiration(Check):
report.resource_id = certificate.id
remaining_days_to_expire = (
certificate.expiry_date_time - datetime.today()
certificate.expiry_date_time
- datetime.now(
certificate.expiry_date_time.tz_info
if hasattr(certificate.expiry_date_time, "tz_info")
else None
)
).days
if remaining_days_to_expire <= DAYS_TO_EXPIRE_THRESHOLD:
report.status = "FAIL"
report.status_extended = f"LDAP Certificate {certificate.id} configured at {directory.id} is about to expire in {remaining_days_to_expire} days"

View File

@@ -1,5 +1,6 @@
import os
import tempfile
import zlib
from base64 import b64decode
from detect_secrets import SecretsCollection
@@ -20,7 +21,13 @@ class ec2_instance_secrets_user_data(Check):
if instance.user_data:
temp_user_data_file = tempfile.NamedTemporaryFile(delete=False)
user_data = b64decode(instance.user_data).decode("utf-8")
user_data = b64decode(instance.user_data)
if user_data[0:2] == b"\x1f\x8b": # GZIP magic number
user_data = zlib.decompress(user_data, zlib.MAX_WBITS | 32).decode(
"utf-8"
)
else:
user_data = user_data.decode("utf-8")
temp_user_data_file.write(
bytes(user_data, encoding="raw_unicode_escape")

View File

@@ -1,6 +1,8 @@
import threading
from dataclasses import dataclass
from botocore.client import ClientError
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
@@ -239,6 +241,11 @@ class EC2:
)["UserData"]
if "Value" in user_data:
instance.user_data = user_data["Value"]
except ClientError as error:
if error.response["Error"]["Code"] == "InvalidInstanceID.NotFound":
logger.warning(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -20,13 +20,13 @@ class ecs_task_definitions_no_environment_secrets(Check):
report.status = "PASS"
report.status_extended = f"No secrets found in variables of ECS task definition {task_definition.name} with revision {task_definition.revision}"
if task_definition.environment_variables:
dump_env_vars = {}
for env_var in task_definition.environment_variables:
dump_env_vars = {}
dump_env_vars.update({env_var.name: env_var.value})
temp_env_data_file = tempfile.NamedTemporaryFile(delete=False)
env_data = dumps(dump_env_vars)
env_data = dumps(dump_env_vars, indent=2)
temp_env_data_file.write(bytes(env_data, encoding="raw_unicode_escape"))
temp_env_data_file.close()
@@ -34,9 +34,16 @@ class ecs_task_definitions_no_environment_secrets(Check):
with default_settings():
secrets.scan_file(temp_env_data_file.name)
if secrets.json():
detect_secrets_output = secrets.json()
if detect_secrets_output:
secrets_string = ", ".join(
[
f"{secret['type']} on line {secret['line_number']}"
for secret in detect_secrets_output[temp_env_data_file.name]
]
)
report.status = "FAIL"
report.status_extended = f"Potential secret found in variables of ECS task definition {task_definition.name} with revision {task_definition.revision}"
report.status_extended = f"Potential secret found in variables of ECS task definition {task_definition.name} with revision {task_definition.revision} -> {secrets_string}"
os.remove(temp_env_data_file.name)

View File

@@ -47,17 +47,18 @@ class ELBv2:
elbv2["LoadBalancerArn"], self.audit_resources
)
):
self.loadbalancersv2.append(
LoadBalancerv2(
name=elbv2["LoadBalancerName"],
dns=elbv2["DNSName"],
region=regional_client.region,
arn=elbv2["LoadBalancerArn"],
scheme=elbv2["Scheme"],
type=elbv2["Type"],
listeners=[],
)
lb = LoadBalancerv2(
name=elbv2["LoadBalancerName"],
region=regional_client.region,
arn=elbv2["LoadBalancerArn"],
type=elbv2["Type"],
listeners=[],
)
if "DNSName" in elbv2:
lb.dns = elbv2["DNSName"]
if "Scheme" in elbv2:
lb.scheme = elbv2["Scheme"]
self.loadbalancersv2.append(lb)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -78,16 +79,19 @@ class ELBv2:
port = 0
if "Port" in listener:
port = listener["Port"]
lb.listeners.append(
Listenerv2(
region=regional_client.region,
arn=listener["ListenerArn"],
port=port,
protocol=listener["Protocol"],
ssl_policy=listener.get("SslPolicy"),
rules=[],
)
listener_obj = Listenerv2(
region=regional_client.region,
arn=listener["ListenerArn"],
port=port,
ssl_policy=listener.get("SslPolicy"),
rules=[],
)
if "Protocol" in listener:
listener_obj.protocol = listener["Protocol"]
lb.listeners.append(listener_obj)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -150,20 +154,20 @@ class Listenerv2(BaseModel):
arn: str
region: str
port: int
protocol: str
protocol: Optional[str]
ssl_policy: Optional[str]
rules: list[ListenerRule]
class LoadBalancerv2(BaseModel):
name: str
dns: str
arn: str
region: str
scheme: str
type: str
access_logs: Optional[str]
desync_mitigation_mode: Optional[str]
deletion_protection: Optional[str]
dns: Optional[str]
drop_invalid_header_fields: Optional[str]
listeners: list[Listenerv2]
scheme: Optional[str]

View File

@@ -97,25 +97,33 @@ class EMR:
slave_node_security_group = cluster_info["Cluster"][
"Ec2InstanceAttributes"
]["EmrManagedSlaveSecurityGroup"]
slave_node_additional_security_groups = cluster_info["Cluster"][
"Ec2InstanceAttributes"
]["AdditionalSlaveSecurityGroups"]
slave_node_additional_security_groups = []
if (
"AdditionalSlaveSecurityGroups"
in cluster_info["Cluster"]["Ec2InstanceAttributes"]
):
slave_node_additional_security_groups = cluster_info["Cluster"][
"Ec2InstanceAttributes"
]["AdditionalSlaveSecurityGroups"]
self.clusters[cluster.id].slave = Node(
security_group_id=slave_node_security_group,
additional_security_groups_id=slave_node_additional_security_groups,
)
# Save MasterPublicDnsName
master_public_dns_name = cluster_info["Cluster"][
master_public_dns_name = cluster_info["Cluster"].get(
"MasterPublicDnsName"
]
)
self.clusters[
cluster.id
].master_public_dns_name = master_public_dns_name
# Set cluster Public/Private
# Public EMR cluster have their DNS ending with .amazonaws.com
# while private ones have format of ip-xxx-xx-xx.us-east-1.compute.internal.
if ".amazonaws.com" in master_public_dns_name:
if (
master_public_dns_name
and ".amazonaws.com" in master_public_dns_name
):
self.clusters[cluster.id].public = True
except Exception as error:

View File

@@ -9,9 +9,7 @@ maximum_expiration_days = 30
class iam_disable_30_days_credentials(Check):
def execute(self) -> Check_Report_AWS:
findings = []
response = iam_client.users
for user in response:
for user in iam_client.users:
report = Check_Report_AWS(self.metadata())
report.resource_id = user.name
report.resource_arn = user.arn
@@ -25,10 +23,10 @@ class iam_disable_30_days_credentials(Check):
)
if time_since_insertion.days > maximum_expiration_days:
report.status = "FAIL"
report.status_extended = f"User {user.name} has not logged in to the console in the past 30 days."
report.status_extended = f"User {user.name} has not logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = f"User {user.name} has logged in to the console in the past 30 days."
report.status_extended = f"User {user.name} has logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = (
@@ -38,4 +36,52 @@ class iam_disable_30_days_credentials(Check):
# Append report
findings.append(report)
for user in iam_client.credential_report:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_arn = user["arn"]
if (
user["access_key_1_active"] != "true"
and user["access_key_2_active"] != "true"
):
report.status = "PASS"
report.status_extended = (
f"User {user['user']} does not have access keys."
)
else:
old_access_keys = False
if user["access_key_1_active"] == "true":
if user["access_key_1_last_used_date"] != "N/A":
access_key_1_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_1_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_1_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 1 in the last {maximum_expiration_days} days ({access_key_1_last_used_date.days} days)."
if user["access_key_2_active"] == "true":
if user["access_key_2_last_used_date"] != "N/A":
access_key_2_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_2_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_2_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 2 in the last {maximum_expiration_days} days ({access_key_2_last_used_date.days} days)."
if not old_access_keys:
report.status = "PASS"
report.status_extended = f"User {user['user']} does not have unused access keys for {maximum_expiration_days} days."
findings.append(report)
return findings

View File

@@ -9,9 +9,7 @@ maximum_expiration_days = 45
class iam_disable_45_days_credentials(Check):
def execute(self) -> Check_Report_AWS:
findings = []
response = iam_client.users
for user in response:
for user in iam_client.users:
report = Check_Report_AWS(self.metadata())
report.resource_id = user.name
report.resource_arn = user.arn
@@ -38,4 +36,52 @@ class iam_disable_45_days_credentials(Check):
# Append report
findings.append(report)
for user in iam_client.credential_report:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_arn = user["arn"]
if (
user["access_key_1_active"] != "true"
and user["access_key_2_active"] != "true"
):
report.status = "PASS"
report.status_extended = (
f"User {user['user']} does not have access keys."
)
else:
old_access_keys = False
if user["access_key_1_active"] == "true":
if user["access_key_1_last_used_date"] != "N/A":
access_key_1_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_1_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_1_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 1 in the last {maximum_expiration_days} days ({access_key_1_last_used_date.days} days)."
if user["access_key_2_active"] == "true":
if user["access_key_2_last_used_date"] != "N/A":
access_key_2_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_2_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_2_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 2 in the last {maximum_expiration_days} days ({access_key_2_last_used_date.days} days)."
if not old_access_keys:
report.status = "PASS"
report.status_extended = f"User {user['user']} does not have unused access keys for {maximum_expiration_days} days."
findings.append(report)
return findings

View File

@@ -9,13 +9,11 @@ maximum_expiration_days = 90
class iam_disable_90_days_credentials(Check):
def execute(self) -> Check_Report_AWS:
findings = []
response = iam_client.users
for user in response:
for user in iam_client.users:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user.name
report.resource_arn = user.arn
report.region = iam_client.region
if user.password_last_used:
time_since_insertion = (
datetime.datetime.now()
@@ -25,17 +23,65 @@ class iam_disable_90_days_credentials(Check):
)
if time_since_insertion.days > maximum_expiration_days:
report.status = "FAIL"
report.status_extended = f"User {user.name} has not logged in to the console in the past 90 days."
report.status_extended = f"User {user.name} has not logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = f"User {user.name} has logged in to the console in the past 90 days."
report.status_extended = f"User {user.name} has logged in to the console in the past {maximum_expiration_days} days."
else:
report.status = "PASS"
report.status_extended = (
f"User {user.name} does not have a console password or is unused."
)
# Append report
findings.append(report)
for user in iam_client.credential_report:
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
report.resource_id = user["user"]
report.resource_arn = user["arn"]
if (
user["access_key_1_active"] != "true"
and user["access_key_2_active"] != "true"
):
report.status = "PASS"
report.status_extended = (
f"User {user['user']} does not have access keys."
)
else:
old_access_keys = False
if user["access_key_1_active"] == "true":
if user["access_key_1_last_used_date"] != "N/A":
access_key_1_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_1_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_1_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 1 in the last {maximum_expiration_days} days ({access_key_1_last_used_date.days} days)."
if user["access_key_2_active"] == "true":
if user["access_key_2_last_used_date"] != "N/A":
access_key_2_last_used_date = (
datetime.datetime.now()
- datetime.datetime.strptime(
user["access_key_2_last_used_date"],
"%Y-%m-%dT%H:%M:%S+00:00",
)
)
if access_key_2_last_used_date.days > maximum_expiration_days:
old_access_keys = True
report.status = "FAIL"
report.status_extended = f"User {user['user']} has not used access key 2 in the last {maximum_expiration_days} days ({access_key_2_last_used_date.days} days)."
if not old_access_keys:
report.status = "PASS"
report.status_extended = f"User {user['user']} does not have unused access keys for {maximum_expiration_days} days."
findings.append(report)
return findings

View File

@@ -12,24 +12,37 @@ class iam_no_custom_policy_permissive_role_assumption(Check):
report.resource_id = policy["PolicyName"]
report.status = "PASS"
report.status_extended = f"Custom Policy {policy['PolicyName']} does not allow permissive STS Role assumption"
if type(policy["PolicyDocument"]["Statement"]) != list:
policy_statements = [policy["PolicyDocument"]["Statement"]]
else:
policy_statements = policy["PolicyDocument"]["Statement"]
for statement in policy_statements:
if (
statement["Effect"] == "Allow"
and "Action" in statement
and (
"sts:AssumeRole" in statement["Action"]
or "sts:*" in statement["Action"]
or "*" in statement["Action"]
)
and "*" in statement["Resource"]
):
report.status = "FAIL"
report.status_extended = f"Custom Policy {policy['PolicyName']} allows permissive STS Role assumption"
break
if policy.get("PolicyDocument"):
if type(policy["PolicyDocument"]["Statement"]) != list:
policy_statements = [policy["PolicyDocument"]["Statement"]]
else:
policy_statements = policy["PolicyDocument"]["Statement"]
for statement in policy_statements:
if (
statement["Effect"] == "Allow"
and "Action" in statement
and "Resource" in statement
and "*" in statement["Resource"]
):
if type(statement["Action"]) == list:
for action in statement["Action"]:
if (
action == "sts:AssumeRole"
or action == "sts:*"
or action == "*"
):
report.status = "FAIL"
report.status_extended = f"Custom Policy {policy['PolicyName']} allows permissive STS Role assumption"
break
else:
if (
statement["Action"] == "sts:AssumeRole"
or statement["Action"] == "sts:*"
or statement["Action"] == "*"
):
report.status = "FAIL"
report.status_extended = f"Custom Policy {policy['PolicyName']} allows permissive STS Role assumption"
break
findings.append(report)

View File

@@ -12,7 +12,7 @@ class iam_password_policy_expires_passwords_within_90_days_or_less(Check):
if iam_client.password_policy:
# Check if password policy expiration exists
if iam_client.password_policy.max_age:
if iam_client.password_policy.max_age < 90:
if iam_client.password_policy.max_age <= 90:
report.status = "PASS"
report.status_extended = f"Password expiration is set lower than 90 days ({iam_client.password_policy.max_age} days)."
else:

Some files were not shown because too many files have changed in this diff Show More