Compare commits
362 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dea3dc51d2 | ||
|
|
e419aa1f1a | ||
|
|
5506547f7f | ||
|
|
568ed72b3e | ||
|
|
e8cc0e6684 | ||
|
|
4331f69395 | ||
|
|
7cc67ae7cb | ||
|
|
244b3438fc | ||
|
|
1a741f7ca0 | ||
|
|
1447800e2b | ||
|
|
f968fe7512 | ||
|
|
0a2349fad7 | ||
|
|
941b8cbc1e | ||
|
|
3b7b16acfd | ||
|
|
fbc7bb68fc | ||
|
|
0d16880596 | ||
|
|
3b5218128f | ||
|
|
cb731bf1db | ||
|
|
7c4d6eb02d | ||
|
|
c14e7fb17a | ||
|
|
fe57811bc5 | ||
|
|
e073b48f7d | ||
|
|
a9df609593 | ||
|
|
6c3db9646e | ||
|
|
ff9c4c717e | ||
|
|
182374b46f | ||
|
|
0871cda526 | ||
|
|
1b47cba37a | ||
|
|
e5bef36905 | ||
|
|
706d723703 | ||
|
|
51eacbfac5 | ||
|
|
5c2a411982 | ||
|
|
08d65cbc41 | ||
|
|
9d2bf429c1 | ||
|
|
d34f863bd4 | ||
|
|
b4abf1c2c7 | ||
|
|
68baaf589e | ||
|
|
be74e41d84 | ||
|
|
848122b0ec | ||
|
|
0edcb7c0d9 | ||
|
|
cc58e06b5e | ||
|
|
0d6ca606ea | ||
|
|
75ee93789f | ||
|
|
05daddafbf | ||
|
|
7bbce6725d | ||
|
|
789b211586 | ||
|
|
826a043748 | ||
|
|
6761048298 | ||
|
|
738fc9acad | ||
|
|
43c0540de7 | ||
|
|
2d1c3d8121 | ||
|
|
f48a5c650d | ||
|
|
66c18eddb8 | ||
|
|
fdd2ee6365 | ||
|
|
c207f60ad8 | ||
|
|
0eaa95c8c0 | ||
|
|
df2fca5935 | ||
|
|
dcaf5d9c7d | ||
|
|
0112969a97 | ||
|
|
3ec0f3d69c | ||
|
|
5555d300a1 | ||
|
|
8155ef4b60 | ||
|
|
a12402f6c8 | ||
|
|
cf28b814cb | ||
|
|
b05f67db19 | ||
|
|
260f4659d5 | ||
|
|
9e700f298c | ||
|
|
56510734c4 | ||
|
|
3938a4d14e | ||
|
|
fa3b9eeeaf | ||
|
|
eb9d6fa25c | ||
|
|
b53307c1c2 | ||
|
|
c3fc708a66 | ||
|
|
b34ffbe6d0 | ||
|
|
f364315e48 | ||
|
|
3ddb5a13a5 | ||
|
|
a24cc399a4 | ||
|
|
305f4b2688 | ||
|
|
9823171d65 | ||
|
|
4761bd8fda | ||
|
|
9c22698723 | ||
|
|
e3892bbcc6 | ||
|
|
629b156f52 | ||
|
|
c45dd47d34 | ||
|
|
ef8831f784 | ||
|
|
c5a42cf5de | ||
|
|
90ebbfc20f | ||
|
|
17cd0dc91d | ||
|
|
fa1f42af59 | ||
|
|
f45ea1ab53 | ||
|
|
0dde3fe483 | ||
|
|
277dc7dd09 | ||
|
|
3215d0b856 | ||
|
|
0167d5efcd | ||
|
|
b48ac808a6 | ||
|
|
616524775c | ||
|
|
5832849b11 | ||
|
|
467c5d01e9 | ||
|
|
24711a2f39 | ||
|
|
24e8286f35 | ||
|
|
e8a1378ad0 | ||
|
|
76bb418ea9 | ||
|
|
cd8770a3e3 | ||
|
|
da834c0935 | ||
|
|
024ffb1117 | ||
|
|
eed7ab9793 | ||
|
|
032feb343f | ||
|
|
eabccba3fa | ||
|
|
d86d656316 | ||
|
|
fa73c91b0b | ||
|
|
2eee50832d | ||
|
|
b40736918b | ||
|
|
ffb1a2e30f | ||
|
|
d6c3c0c6c1 | ||
|
|
ee251721ac | ||
|
|
fdbb9195d5 | ||
|
|
c68b08d9af | ||
|
|
3653bbfca0 | ||
|
|
05c7cc7277 | ||
|
|
5670bf099b | ||
|
|
0c324b0f09 | ||
|
|
968557e38e | ||
|
|
882cdebacb | ||
|
|
07753e1774 | ||
|
|
5b984507fc | ||
|
|
27df481967 | ||
|
|
0943031f23 | ||
|
|
2d95168de0 | ||
|
|
97cae8f92c | ||
|
|
eb213bac92 | ||
|
|
8187788b2c | ||
|
|
c80e08abce | ||
|
|
42fd851e5c | ||
|
|
70e4ebccab | ||
|
|
140f87c741 | ||
|
|
b0d756123e | ||
|
|
6188c92916 | ||
|
|
34c6f96728 | ||
|
|
50fd047c0b | ||
|
|
5bcc05b536 | ||
|
|
ce7d6c8dd5 | ||
|
|
d87a1e28b4 | ||
|
|
227306c572 | ||
|
|
45c2691f89 | ||
|
|
d0c81245b8 | ||
|
|
e494afb1aa | ||
|
|
ecc3c1cf3b | ||
|
|
228b16416a | ||
|
|
17eb74842a | ||
|
|
c01ff74c73 | ||
|
|
f88613b26d | ||
|
|
3464f4241f | ||
|
|
849b703828 | ||
|
|
4b935a40b6 | ||
|
|
5873a23ccb | ||
|
|
eae2786825 | ||
|
|
6407386de5 | ||
|
|
3fe950723f | ||
|
|
52bf6acd46 | ||
|
|
9590e7d7e0 | ||
|
|
7a08140a2d | ||
|
|
d1491cfbd1 | ||
|
|
695b80549d | ||
|
|
11c60a637f | ||
|
|
844ad70bb9 | ||
|
|
5ac7cde577 | ||
|
|
ce3ef0550f | ||
|
|
813f3e7d42 | ||
|
|
d03f97af6b | ||
|
|
019ab0286d | ||
|
|
c6647b4706 | ||
|
|
f913536d88 | ||
|
|
640d1bd176 | ||
|
|
66baccf528 | ||
|
|
6e6dacbace | ||
|
|
cdbb10fb26 | ||
|
|
c34ba3918c | ||
|
|
fa228c876c | ||
|
|
2f4d0af7d7 | ||
|
|
2d3e5235a9 | ||
|
|
8e91ccaa54 | ||
|
|
6955658b36 | ||
|
|
dbb44401fd | ||
|
|
b42ed70c84 | ||
|
|
a28276d823 | ||
|
|
fa4b27dd0e | ||
|
|
0be44d5c49 | ||
|
|
2514596276 | ||
|
|
7008d2a953 | ||
|
|
2539fedfc4 | ||
|
|
b453df7591 | ||
|
|
9e5d5edcba | ||
|
|
2d5de6ff99 | ||
|
|
259e9f1c17 | ||
|
|
daeb53009e | ||
|
|
f12d271ca5 | ||
|
|
965185ca3b | ||
|
|
9c484f6a78 | ||
|
|
de18c3c722 | ||
|
|
9be753b281 | ||
|
|
d6ae122de1 | ||
|
|
c6b90044f2 | ||
|
|
14898b6422 | ||
|
|
26294b0759 | ||
|
|
6da45b5c2b | ||
|
|
674332fddd | ||
|
|
ab8942d05a | ||
|
|
29790b8a5c | ||
|
|
4a4c26ffeb | ||
|
|
25c9bc07b2 | ||
|
|
d22d4c4c83 | ||
|
|
d88640fd20 | ||
|
|
57a2fca3a4 | ||
|
|
f796688c84 | ||
|
|
d6bbf8b7cc | ||
|
|
37ec460f64 | ||
|
|
004b9c95e4 | ||
|
|
86e27b465a | ||
|
|
5e9afddc3a | ||
|
|
de281535b1 | ||
|
|
9df7def14e | ||
|
|
5b9db9795d | ||
|
|
7d2ce7e6ab | ||
|
|
3e807af2b2 | ||
|
|
4c64dc7885 | ||
|
|
e7a7874b34 | ||
|
|
c78a47788b | ||
|
|
922698c5d9 | ||
|
|
8e8a490936 | ||
|
|
231bc0605f | ||
|
|
0298ff9478 | ||
|
|
33a25dcf0e | ||
|
|
54c16e3cdb | ||
|
|
28a978acc2 | ||
|
|
bea26a461f | ||
|
|
ed54c5b8b9 | ||
|
|
13316b68aa | ||
|
|
043986f35b | ||
|
|
2dc4421dd6 | ||
|
|
6c16e2bca2 | ||
|
|
c2b4a8e115 | ||
|
|
63b7bc8794 | ||
|
|
f41ae74ae2 | ||
|
|
98689d223e | ||
|
|
f19cf21146 | ||
|
|
24e19e6b18 | ||
|
|
08376cb15e | ||
|
|
5f6e4663c0 | ||
|
|
9b91c00fcc | ||
|
|
229ab88c2f | ||
|
|
8863d13578 | ||
|
|
e07fc9fbb9 | ||
|
|
0164574fdd | ||
|
|
98eec332d8 | ||
|
|
3d2986fc64 | ||
|
|
29e7f8581e | ||
|
|
4ee3f6c87a | ||
|
|
b8c7440e1f | ||
|
|
d49ff8d9a4 | ||
|
|
07198042bd | ||
|
|
c7a9492e96 | ||
|
|
360c6f3c1c | ||
|
|
89aab4acd5 | ||
|
|
d9b3e842d9 | ||
|
|
3ac4dc8392 | ||
|
|
0d1a5318ec | ||
|
|
94b7a219fd | ||
|
|
ba3eb71abd | ||
|
|
bbc9e11205 | ||
|
|
75571e4266 | ||
|
|
4e879271a0 | ||
|
|
552e0fefc3 | ||
|
|
cb7439a831 | ||
|
|
35d6b8bbc6 | ||
|
|
48b9220ffc | ||
|
|
5537981877 | ||
|
|
711f24a5b2 | ||
|
|
5d2b8bc8aa | ||
|
|
f6ea10db2d | ||
|
|
fc38ba3acb | ||
|
|
0830ad268f | ||
|
|
e633664c2a | ||
|
|
d4c7d9a60a | ||
|
|
5ee0d964f3 | ||
|
|
ba5e0f145f | ||
|
|
34eb9cc063 | ||
|
|
a795fdc40d | ||
|
|
24cba4c4ca | ||
|
|
3d13f4bb9b | ||
|
|
e713d0d321 | ||
|
|
4e34be87a1 | ||
|
|
07307d37a1 | ||
|
|
81463181bc | ||
|
|
02e57927fc | ||
|
|
36925f0dbd | ||
|
|
f9b985e03d | ||
|
|
598ad62b92 | ||
|
|
ea929ab713 | ||
|
|
04e56ced58 | ||
|
|
2278565b86 | ||
|
|
afd0c56b44 | ||
|
|
5ebdf66d22 | ||
|
|
177d8a72a7 | ||
|
|
03ef80dd8e | ||
|
|
6f9825362a | ||
|
|
2167154064 | ||
|
|
f88b35bd80 | ||
|
|
6b9520338e | ||
|
|
438c087856 | ||
|
|
2a43274b06 | ||
|
|
20a9336867 | ||
|
|
c921782714 | ||
|
|
776ac9e3d4 | ||
|
|
d02bd9b717 | ||
|
|
50070e8fe7 | ||
|
|
e3e3b3e279 | ||
|
|
38fba297e8 | ||
|
|
52d65ee4e8 | ||
|
|
9ad2f33dd8 | ||
|
|
02ae23b11d | ||
|
|
70c6d6e7ae | ||
|
|
8efebf992f | ||
|
|
b9be94bcc5 | ||
|
|
e6310c32ac | ||
|
|
654b4702d0 | ||
|
|
262b5a7ee5 | ||
|
|
ef0d4fe34b | ||
|
|
c08342f40c | ||
|
|
e7796268b5 | ||
|
|
0cbe80d2ab | ||
|
|
11d3ba70a0 | ||
|
|
c30e4c4867 | ||
|
|
d1e5087c18 | ||
|
|
618dd442e3 | ||
|
|
7f26fdf2d0 | ||
|
|
64090474e1 | ||
|
|
a69c28713a | ||
|
|
1d4b3095af | ||
|
|
ff75125af8 | ||
|
|
aa0025abbe | ||
|
|
c9436da235 | ||
|
|
12f1eaace7 | ||
|
|
09ef8aba0f | ||
|
|
08c094b8a5 | ||
|
|
e9fb4410cd | ||
|
|
cbdda22a33 | ||
|
|
fe906477da | ||
|
|
b03df619df | ||
|
|
53d89d8d17 | ||
|
|
1e5a1f3e1f | ||
|
|
6efe2979c6 | ||
|
|
92cc2c8e69 | ||
|
|
50dd2e4179 | ||
|
|
7a8fd9c3d3 | ||
|
|
d5a3fc490b | ||
|
|
13f948062b | ||
|
|
b965fda226 | ||
|
|
f9d67f0e9d | ||
|
|
4dfa20e40b | ||
|
|
d5edbaa3a9 | ||
|
|
0cd5ce8c29 | ||
|
|
1c50a87ca2 |
2
.github/CODEOWNERS
vendored
@@ -1 +1 @@
|
||||
* @prowler-cloud/prowler-team
|
||||
* @prowler-cloud/prowler-oss
|
||||
|
||||
52
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,52 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: "[Bug]: "
|
||||
labels: bug, status/needs-triage
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
Please use this template to create your bug report. By providing as much info as possible you help us understand the issue, reproduce it and resolve it for you quicker. Therefore, take a couple of extra minutes to make sure you have provided all info needed.
|
||||
|
||||
PROTIP: record your screen and attach it as a gif to showcase the issue.
|
||||
|
||||
- How to record and attach gif: https://bit.ly/2Mi8T6K
|
||||
-->
|
||||
|
||||
**What happened?**
|
||||
A clear and concise description of what the bug is or what is not working as expected
|
||||
|
||||
|
||||
**How to reproduce it**
|
||||
Steps to reproduce the behavior:
|
||||
1. What command are you running?
|
||||
2. Cloud provider you are launching
|
||||
3. Environment you have like single account, multi-account, organizations, multi or single subsctiption, etc.
|
||||
4. See error
|
||||
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
|
||||
**Screenshots or Logs**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
Also, you can add logs (anonymize them first!). Here a command that may help to share a log
|
||||
`prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log` then attach here the log file.
|
||||
|
||||
|
||||
**From where are you running Prowler?**
|
||||
Please, complete the following information:
|
||||
- Resource: (e.g. EC2 instance, Fargate task, Docker container manually, EKS, Cloud9, CodeBuild, workstation, etc.)
|
||||
- OS: [e.g. Amazon Linux 2, Mac, Alpine, Windows, etc. ]
|
||||
- Prowler Version [`./prowler --version`]:
|
||||
- Python version [`python --version`]:
|
||||
- Pip version [`pip --version`]:
|
||||
- Installation method (Are you running it from pip package or cloning the github repo?):
|
||||
- Others:
|
||||
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
97
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
name: 🐞 Bug Report
|
||||
description: Create a report to help us improve
|
||||
title: "[Bug]: "
|
||||
labels: ["bug", "status/needs-triage"]
|
||||
|
||||
body:
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
label: Steps to Reproduce
|
||||
description: Steps to reproduce the behavior
|
||||
placeholder: |-
|
||||
1. What command are you running?
|
||||
2. Cloud provider you are launching
|
||||
3. Environment you have, like single account, multi-account, organizations, multi or single subscription, etc.
|
||||
4. See error
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: actual
|
||||
attributes:
|
||||
label: Actual Result with Screenshots or Logs
|
||||
description: If applicable, add screenshots to help explain your problem. Also, you can add logs (anonymize them first!). Here a command that may help to share a log `prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log` then attach here the log file.
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: type
|
||||
attributes:
|
||||
label: How did you install Prowler?
|
||||
options:
|
||||
- Cloning the repository from github.com (git clone)
|
||||
- From pip package (pip install prowler)
|
||||
- From brew (brew install prowler)
|
||||
- Docker (docker pull toniblyx/prowler)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: environment
|
||||
attributes:
|
||||
label: Environment Resource
|
||||
description: From where are you running Prowler?
|
||||
placeholder: |-
|
||||
1. EC2 instance
|
||||
2. Fargate task
|
||||
3. Docker container locally
|
||||
4. EKS
|
||||
5. Cloud9
|
||||
6. CodeBuild
|
||||
7. Workstation
|
||||
8. Other(please specify)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: os
|
||||
attributes:
|
||||
label: OS used
|
||||
description: Which OS are you using?
|
||||
placeholder: |-
|
||||
1. Amazon Linux 2
|
||||
2. MacOS
|
||||
3. Alpine Linux
|
||||
4. Windows
|
||||
5. Other(please specify)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: prowler-version
|
||||
attributes:
|
||||
label: Prowler version
|
||||
description: Which Prowler version are you using?
|
||||
placeholder: |-
|
||||
prowler --version
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: pip-version
|
||||
attributes:
|
||||
label: Pip version
|
||||
description: Which pip version are you using?
|
||||
placeholder: |-
|
||||
pip --version
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional
|
||||
attributes:
|
||||
description: Additional context
|
||||
label: Context
|
||||
validations:
|
||||
required: false
|
||||
36
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
name: 💡 Feature Request
|
||||
description: Suggest an idea for this project
|
||||
labels: ["enhancement", "status/needs-triage"]
|
||||
|
||||
|
||||
body:
|
||||
- type: textarea
|
||||
id: Problem
|
||||
attributes:
|
||||
label: New feature motivation
|
||||
description: Is your feature request related to a problem? Please describe
|
||||
placeholder: |-
|
||||
1. A clear and concise description of what the problem is. Ex. I'm always frustrated when
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: Solution
|
||||
attributes:
|
||||
label: Solution Proposed
|
||||
description: A clear and concise description of what you want to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: Alternatives
|
||||
attributes:
|
||||
label: Describe alternatives you've considered
|
||||
description: A clear and concise description of any alternative solutions or features you've considered.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: Context
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: Add any other context or screenshots about the feature request here.
|
||||
validations:
|
||||
required: false
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: enhancement, status/needs-triage
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
2
.github/dependabot.yml
vendored
@@ -8,7 +8,7 @@ updates:
|
||||
- package-ecosystem: "pip" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
schedule:
|
||||
interval: "daily"
|
||||
interval: "weekly"
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
|
||||
215
.github/workflows/build-lint-push-containers.yml
vendored
@@ -7,115 +7,56 @@ on:
|
||||
paths-ignore:
|
||||
- ".github/**"
|
||||
- "README.md"
|
||||
- "docs/**"
|
||||
|
||||
release:
|
||||
types: [published, edited]
|
||||
types: [published]
|
||||
|
||||
env:
|
||||
AWS_REGION_STG: eu-west-1
|
||||
AWS_REGION_PLATFORM: eu-west-1
|
||||
AWS_REGION_PRO: us-east-1
|
||||
AWS_REGION: us-east-1
|
||||
IMAGE_NAME: prowler
|
||||
LATEST_TAG: latest
|
||||
STABLE_TAG: stable
|
||||
TEMPORARY_TAG: temporary
|
||||
DOCKERFILE_PATH: ./Dockerfile
|
||||
PYTHON_VERSION: 3.9
|
||||
|
||||
jobs:
|
||||
# Lint Dockerfile using Hadolint
|
||||
# dockerfile-linter:
|
||||
# runs-on: ubuntu-latest
|
||||
# steps:
|
||||
# -
|
||||
# name: Checkout
|
||||
# uses: actions/checkout@v3
|
||||
# -
|
||||
# name: Install Hadolint
|
||||
# run: |
|
||||
# VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
# grep '"tag_name":' | \
|
||||
# sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
# ) && curl -L -o /tmp/hadolint https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64 \
|
||||
# && chmod +x /tmp/hadolint
|
||||
# -
|
||||
# name: Run Hadolint
|
||||
# run: |
|
||||
# /tmp/hadolint util/Dockerfile
|
||||
|
||||
# Build Prowler OSS container
|
||||
container-build:
|
||||
container-build-push:
|
||||
# needs: dockerfile-linter
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
POETRY_VIRTUALENVS_CREATE: "false"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
# Without pushing to registries
|
||||
push: false
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }}
|
||||
file: ${{ env.DOCKERFILE_PATH }}
|
||||
outputs: type=docker,dest=/tmp/${{ env.IMAGE_NAME }}.tar
|
||||
- name: Share image between jobs
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: ${{ env.IMAGE_NAME }}.tar
|
||||
path: /tmp/${{ env.IMAGE_NAME }}.tar
|
||||
|
||||
# Lint Prowler OSS container using Dockle
|
||||
# container-linter:
|
||||
# needs: container-build
|
||||
# runs-on: ubuntu-latest
|
||||
# steps:
|
||||
# -
|
||||
# name: Get container image from shared
|
||||
# uses: actions/download-artifact@v2
|
||||
# with:
|
||||
# name: ${{ env.IMAGE_NAME }}.tar
|
||||
# path: /tmp
|
||||
# -
|
||||
# name: Load Docker image
|
||||
# run: |
|
||||
# docker load --input /tmp/${{ env.IMAGE_NAME }}.tar
|
||||
# docker image ls -a
|
||||
# -
|
||||
# name: Install Dockle
|
||||
# run: |
|
||||
# VERSION=$(curl --silent "https://api.github.com/repos/goodwithtech/dockle/releases/latest" | \
|
||||
# grep '"tag_name":' | \
|
||||
# sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
# ) && curl -L -o dockle.deb https://github.com/goodwithtech/dockle/releases/download/v${VERSION}/dockle_${VERSION}_Linux-64bit.deb \
|
||||
# && sudo dpkg -i dockle.deb && rm dockle.deb
|
||||
# -
|
||||
# name: Run Dockle
|
||||
# run: dockle ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }}
|
||||
|
||||
# Push Prowler OSS container to registries
|
||||
container-push:
|
||||
# needs: container-linter
|
||||
needs: container-build
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read # This is required for actions/checkout
|
||||
steps:
|
||||
- name: Get container image from shared
|
||||
uses: actions/download-artifact@v2
|
||||
- name: Setup python (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
name: ${{ env.IMAGE_NAME }}.tar
|
||||
path: /tmp
|
||||
- name: Load Docker image
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Install dependencies (release)
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
docker load --input /tmp/${{ env.IMAGE_NAME }}.tar
|
||||
docker image ls -a
|
||||
pipx install poetry
|
||||
pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Update Prowler version (release)
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
poetry version ${{ github.event.release.tag_name }}
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
@@ -123,70 +64,54 @@ jobs:
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
password: ${{ secrets.PUBLIC_ECR_AWS_SECRET_ACCESS_KEY }}
|
||||
env:
|
||||
AWS_REGION: ${{ env.AWS_REGION_PRO }}
|
||||
- name: Configure AWS Credentials -- STG
|
||||
if: github.event_name == 'push'
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_STG }}
|
||||
role-to-assume: ${{ secrets.STG_IAM_ROLE_ARN }}
|
||||
role-session-name: build-lint-containers-stg
|
||||
- name: Login to ECR -- STG
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ secrets.STG_ECR }}
|
||||
- name: Configure AWS Credentials -- PLATFORM
|
||||
if: github.event_name == 'release'
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_PLATFORM }}
|
||||
role-to-assume: ${{ secrets.STG_IAM_ROLE_ARN }}
|
||||
role-session-name: build-lint-containers-pro
|
||||
- name: Login to ECR -- PLATFORM
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ secrets.PLATFORM_ECR }}
|
||||
- # Push to master branch - push "latest" tag
|
||||
name: Tag (latest)
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PLATFORM_ECR }}/${{ secrets.PLATFORM_ECR_REPOSITORY }}:${{ env.LATEST_TAG }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
- # Push to master branch - push "latest" tag
|
||||
name: Push (latest)
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
docker push ${{ secrets.PLATFORM_ECR }}/${{ secrets.PLATFORM_ECR_REPOSITORY }}:${{ env.LATEST_TAG }}
|
||||
docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
- # Tag the new release (stable and release tag)
|
||||
name: Tag (release)
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PLATFORM_ECR }}/${{ secrets.PLATFORM_ECR_REPOSITORY }}:${{ github.event.release.tag_name }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
AWS_REGION: ${{ env.AWS_REGION }}
|
||||
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PLATFORM_ECR }}/${{ secrets.PLATFORM_ECR_REPOSITORY }}:${{ env.STABLE_TAG }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- # Push the new release (stable and release tag)
|
||||
name: Push (release)
|
||||
- name: Build container image (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
file: ${{ env.DOCKERFILE_PATH }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
# Use local context to get changes
|
||||
# https://github.com/docker/build-push-action#path-context
|
||||
context: .
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
file: ${{ env.DOCKERFILE_PATH }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
dispatch-action:
|
||||
needs: container-build-push
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Get latest commit info
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
LATEST_COMMIT_HASH=$(echo ${{ github.event.after }} | cut -b -7)
|
||||
echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV
|
||||
- name: Dispatch event for latest
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" -H "X-GitHub-Api-Version: 2022-11-28" --data '{"event_type":"dispatch","client_payload":{"version":"latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
|
||||
- name: Dispatch event for release
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
docker push ${{ secrets.PLATFORM_ECR }}/${{ secrets.PLATFORM_ECR_REPOSITORY }}:${{ github.event.release.tag_name }}
|
||||
docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
|
||||
docker push ${{ secrets.PLATFORM_ECR }}/${{ secrets.PLATFORM_ECR_REPOSITORY }}:${{ env.STABLE_TAG }}
|
||||
docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
- name: Delete artifacts
|
||||
if: always()
|
||||
uses: geekyeggo/delete-artifact@v1
|
||||
with:
|
||||
name: ${{ env.IMAGE_NAME }}.tar
|
||||
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" -H "X-GitHub-Api-Version: 2022-11-28" --data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ github.event.release.tag_name }}"}}'
|
||||
|
||||
57
.github/workflows/codeql.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "master", prowler-2, prowler-3.0-dev ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ "master" ]
|
||||
schedule:
|
||||
- cron: '00 12 * * *'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'python' ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
28
.github/workflows/pull-request.yml
vendored
@@ -17,42 +17,48 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install poetry
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: 'poetry'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install pipenv
|
||||
pipenv install --dev
|
||||
pipenv run pip list
|
||||
poetry install
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
|
||||
&& chmod +x /tmp/hadolint
|
||||
- name: Poetry check
|
||||
run: |
|
||||
poetry lock --check
|
||||
- name: Lint with flake8
|
||||
run: |
|
||||
pipenv run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
|
||||
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
|
||||
- name: Checking format with black
|
||||
run: |
|
||||
pipenv run black --check .
|
||||
poetry run black --check .
|
||||
- name: Lint with pylint
|
||||
run: |
|
||||
pipenv run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
|
||||
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
|
||||
- name: Bandit
|
||||
run: |
|
||||
pipenv run bandit -q -lll -x '*_test.py,./contrib/' -r .
|
||||
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
|
||||
- name: Safety
|
||||
run: |
|
||||
pipenv run safety check
|
||||
poetry run safety check
|
||||
- name: Vulture
|
||||
run: |
|
||||
pipenv run vulture --exclude "contrib" --min-confidence 100 .
|
||||
poetry run vulture --exclude "contrib" --min-confidence 100 .
|
||||
- name: Hadolint
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
- name: Test with pytest
|
||||
run: |
|
||||
pipenv run pytest tests -n auto
|
||||
poetry run pytest tests -n auto
|
||||
|
||||
79
.github/workflows/pypi-release.yml
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
name: pypi-release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
env:
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
GITHUB_BRANCH: master
|
||||
|
||||
jobs:
|
||||
release-prowler-job:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
POETRY_VIRTUALENVS_CREATE: "false"
|
||||
name: Release Prowler to PyPI
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ env.GITHUB_BRANCH }}
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pipx install poetry
|
||||
pipx inject poetry poetry-bumpversion
|
||||
- name: setup python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.9
|
||||
cache: 'poetry'
|
||||
- name: Change version and Build package
|
||||
run: |
|
||||
poetry version ${{ env.RELEASE_TAG }}
|
||||
git config user.name "github-actions"
|
||||
git config user.email "<noreply@github.com>"
|
||||
git add prowler/config/config.py pyproject.toml
|
||||
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify
|
||||
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}"
|
||||
git push -f origin ${{ env.RELEASE_TAG }}
|
||||
poetry build
|
||||
- name: Publish prowler package to PyPI
|
||||
run: |
|
||||
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
|
||||
poetry publish
|
||||
# Create pull request with new version
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
|
||||
commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
|
||||
branch: release-${{ env.RELEASE_TAG }}
|
||||
labels: "status/waiting-for-revision, severity/low"
|
||||
title: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
This PR updates Prowler Version to ${{ env.RELEASE_TAG }}.
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
- name: Replicate PyPi Package
|
||||
run: |
|
||||
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
|
||||
pip install toml
|
||||
python util/replicate_pypi_package.py
|
||||
poetry build
|
||||
- name: Publish prowler-cloud package to PyPI
|
||||
run: |
|
||||
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
|
||||
poetry publish
|
||||
# Create pull request to github.com/Homebrew/homebrew-core to update prowler formula
|
||||
- name: Bump Homebrew formula
|
||||
uses: mislav/bump-homebrew-formula-action@v2
|
||||
with:
|
||||
formula-name: prowler
|
||||
base-branch: release-${{ env.RELEASE_TAG }}
|
||||
env:
|
||||
COMMITTER_TOKEN: ${{ secrets.PROWLER_ACCESS_TOKEN }}
|
||||
@@ -19,6 +19,7 @@ jobs:
|
||||
permissions:
|
||||
id-token: write
|
||||
pull-requests: write
|
||||
contents: write
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
@@ -51,11 +52,11 @@ jobs:
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
|
||||
commit-message: "feat(regions_update): Update regions for AWS services."
|
||||
branch: "aws-services-regions-updated"
|
||||
branch: "aws-services-regions-updated-${{ github.sha }}"
|
||||
labels: "status/waiting-for-revision, severity/low"
|
||||
title: "feat(regions_update): Changes in regions for AWS services."
|
||||
title: "chore(regions_update): Changes in regions for AWS services."
|
||||
body: |
|
||||
### Description
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
repos:
|
||||
## GENERAL
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.3.0
|
||||
rev: v4.4.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
@@ -13,14 +13,22 @@ repos:
|
||||
- id: pretty-format-json
|
||||
args: ["--autofix", --no-sort-keys, --no-ensure-ascii]
|
||||
|
||||
## TOML
|
||||
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
||||
rev: v2.7.0
|
||||
hooks:
|
||||
- id: pretty-format-toml
|
||||
args: [--autofix]
|
||||
files: pyproject.toml
|
||||
|
||||
## BASH
|
||||
- repo: https://github.com/koalaman/shellcheck-precommit
|
||||
rev: v0.8.0
|
||||
rev: v0.9.0
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
## PYTHON
|
||||
- repo: https://github.com/myint/autoflake
|
||||
rev: v1.7.7
|
||||
rev: v2.0.1
|
||||
hooks:
|
||||
- id: autoflake
|
||||
args:
|
||||
@@ -31,30 +39,31 @@ repos:
|
||||
]
|
||||
|
||||
- repo: https://github.com/timothycrosley/isort
|
||||
rev: 5.10.1
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
args: ["--profile", "black"]
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 22.10.0
|
||||
rev: 23.1.0
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 5.0.4
|
||||
rev: 6.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
exclude: contrib
|
||||
args: ["--ignore=E266,W503,E203,E501,W605"]
|
||||
|
||||
- repo: https://github.com/haizaar/check-pipfile-lock
|
||||
rev: v0.0.5
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 1.4.0 # add version here
|
||||
hooks:
|
||||
- id: check-pipfile-lock
|
||||
- id: poetry-check
|
||||
- id: poetry-lock
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: v2.12.0
|
||||
rev: v2.12.1-beta
|
||||
hooks:
|
||||
- id: hadolint
|
||||
args: ["--ignore=DL3013"]
|
||||
|
||||
23
.readthedocs.yaml
Normal file
@@ -0,0 +1,23 @@
|
||||
# .readthedocs.yaml
|
||||
# Read the Docs configuration file
|
||||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
|
||||
# Required
|
||||
version: 2
|
||||
|
||||
build:
|
||||
os: "ubuntu-22.04"
|
||||
tools:
|
||||
python: "3.9"
|
||||
jobs:
|
||||
post_create_environment:
|
||||
# Install poetry
|
||||
# https://python-poetry.org/docs/#installing-manually
|
||||
- pip install poetry
|
||||
# Tell poetry to not use a virtual environment
|
||||
- poetry config virtualenvs.create false
|
||||
post_install:
|
||||
- poetry install -E docs
|
||||
|
||||
mkdocs:
|
||||
configuration: mkdocs.yml
|
||||
@@ -16,6 +16,7 @@ USER prowler
|
||||
WORKDIR /home/prowler
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY pyproject.toml /home/prowler
|
||||
COPY README.md /home/prowler
|
||||
|
||||
# Install dependencies
|
||||
ENV HOME='/home/prowler'
|
||||
@@ -24,4 +25,9 @@ ENV PATH="$HOME/.local/bin:$PATH"
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir .
|
||||
|
||||
# Remove Prowler directory and build files
|
||||
USER 0
|
||||
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
|
||||
|
||||
USER prowler
|
||||
ENTRYPOINT ["prowler"]
|
||||
|
||||
4
Makefile
@@ -24,11 +24,11 @@ lint: ## Lint Code
|
||||
|
||||
##@ PyPI
|
||||
pypi-clean: ## Delete the distribution files
|
||||
rm -rf ./dist && rm -rf ./build && rm -rf prowler_cloud.egg-info
|
||||
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
|
||||
|
||||
pypi-build: ## Build package
|
||||
$(MAKE) pypi-clean && \
|
||||
python3 -m build
|
||||
poetry build
|
||||
|
||||
pypi-upload: ## Upload package
|
||||
python3 -m twine upload --repository pypi dist/*
|
||||
|
||||
41
Pipfile
@@ -1,41 +0,0 @@
|
||||
[[source]]
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
|
||||
[packages]
|
||||
colorama = "0.4.4"
|
||||
boto3 = "1.26.3"
|
||||
arnparse = "0.0.2"
|
||||
botocore = "1.27.8"
|
||||
pydantic = "1.9.1"
|
||||
shodan = "1.28.0"
|
||||
detect-secrets = "1.4.0"
|
||||
alive-progress = "2.4.1"
|
||||
tabulate = "0.9.0"
|
||||
azure-identity = "1.12.0"
|
||||
azure-storage-blob = "12.14.1"
|
||||
msgraph-core = "0.2.2"
|
||||
azure-mgmt-subscription = "3.1.1"
|
||||
azure-mgmt-authorization = "3.0.0"
|
||||
azure-mgmt-security = "3.0.0"
|
||||
azure-mgmt-storage = "21.0.0"
|
||||
|
||||
[dev-packages]
|
||||
black = "22.10.0"
|
||||
pylint = "2.15.9"
|
||||
flake8 = "5.0.4"
|
||||
bandit = "1.7.4"
|
||||
safety = "2.3.1"
|
||||
vulture = "2.6"
|
||||
moto = "4.0.13"
|
||||
docker = "6.0.0"
|
||||
openapi-spec-validator = "0.5.1"
|
||||
pytest = "7.1.2"
|
||||
pytest-xdist = "2.5.0"
|
||||
coverage = "7.0.3"
|
||||
sure = "2.0.0"
|
||||
freezegun = "1.2.1"
|
||||
|
||||
[requires]
|
||||
python_version = "3.9"
|
||||
1511
Pipfile.lock
generated
62
README.md
@@ -11,12 +11,15 @@
|
||||
</p>
|
||||
<p align="center">
|
||||
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
|
||||
<a href="https://pypi.org/project/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler-cloud.svg"></a>
|
||||
<a href="https://pypi.python.org/pypi/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler-cloud.svg"></a>
|
||||
<a href="https://pypi.org/project/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
|
||||
<a href="https://pypi.python.org/pypi/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
|
||||
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
|
||||
<a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg?label=prowler-cloud%20downloads"></a>
|
||||
<a href="https://formulae.brew.sh/formula/prowler#default"><img alt="Brew Prowler Downloads" src="https://img.shields.io/homebrew/installs/dm/prowler?label=brew%20downloads"></a>
|
||||
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
|
||||
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
|
||||
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
|
||||
<a href="https://gallery.ecr.aws/o4g1s5r6/prowler"><img width="120" height=19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
|
||||
<a href="https://gallery.ecr.aws/prowler-cloud/prowler"><img width="120" height=19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
|
||||
</p>
|
||||
<p align="center">
|
||||
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
|
||||
@@ -31,21 +34,27 @@
|
||||
|
||||
# Description
|
||||
|
||||
`Prowler` is an Open Source security tool to perform AWS and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
|
||||
`Prowler` is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
|
||||
|
||||
It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
|
||||
|
||||
# 📖 Documentation
|
||||
|
||||
The full documentation can now be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)
|
||||
|
||||
## Looking for Prowler v2 documentation?
|
||||
For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.
|
||||
|
||||
# ⚙️ Install
|
||||
|
||||
## Pip package
|
||||
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9:
|
||||
|
||||
```console
|
||||
pip install prowler-cloud
|
||||
pip install prowler
|
||||
prowler -v
|
||||
```
|
||||
More details at https://docs.prowler.cloud
|
||||
|
||||
## Containers
|
||||
|
||||
@@ -58,25 +67,20 @@ The available versions of Prowler are the following:
|
||||
The container images are available here:
|
||||
|
||||
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
|
||||
- [AWS Public ECR](https://gallery.ecr.aws/o4g1s5r6/prowler)
|
||||
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
|
||||
|
||||
## From Github
|
||||
|
||||
Python >= 3.9 is required with pip and pipenv:
|
||||
Python >= 3.9 is required with pip and poetry:
|
||||
|
||||
```
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
pipenv shell
|
||||
pipenv install
|
||||
poetry shell
|
||||
poetry install
|
||||
python prowler.py -v
|
||||
```
|
||||
|
||||
# 📖 Documentation
|
||||
|
||||
The full documentation now can be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)
|
||||
|
||||
|
||||
# 📐✏️ High level architecture
|
||||
|
||||
You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.
|
||||
@@ -112,6 +116,22 @@ Those credentials must be associated to a user or role with proper permissions t
|
||||
|
||||
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
|
||||
|
||||
## Google Cloud Platform
|
||||
|
||||
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
|
||||
|
||||
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
|
||||
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
|
||||
|
||||
- Viewer
|
||||
- Security Reviewer
|
||||
- Stackdriver Account Viewer
|
||||
|
||||
> `prowler` will scan the project associated with the credentials.
|
||||
|
||||
## Azure
|
||||
|
||||
Prowler for Azure supports the following authentication types:
|
||||
@@ -123,7 +143,7 @@ Those credentials must be associated to a user or role with proper permissions t
|
||||
|
||||
### Service Principal authentication
|
||||
|
||||
To allow Prowler assume the service principal identity to start the scan it is needed to configure the following environment variables:
|
||||
To allow Prowler assume the service principal identity to start the scan, it is needed to configure the following environment variables:
|
||||
|
||||
```console
|
||||
export AZURE_CLIENT_ID="XXXXXXXXX"
|
||||
@@ -134,11 +154,11 @@ export AZURE_CLIENT_SECRET="XXXXXXX"
|
||||
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
|
||||
### AZ CLI / Browser / Managed Identity authentication
|
||||
|
||||
The other three cases does not need additional configuration, `--az-cli-auth` and `--managed-identity-auth` are automated options, `--browser-auth` needs the user to authenticate using the default browser to start the scan.
|
||||
The other three cases do not need additional configuration, `--az-cli-auth` and `--managed-identity-auth` are automated options, `--browser-auth` needs the user to authenticate using the default browser to start the scan.
|
||||
|
||||
### Permissions
|
||||
|
||||
To use each one you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
|
||||
To use each one, you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
|
||||
|
||||
- **Azure Active Directory permissions**: Used to retrieve metadata from the identity assumed by Prowler and future AAD checks (not mandatory to have access to execute the tool)
|
||||
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
|
||||
@@ -225,6 +245,14 @@ prowler aws --profile custom-profile -f us-east-1 eu-south-2
|
||||
```
|
||||
> By default, `prowler` will scan all AWS regions.
|
||||
|
||||
## Google Cloud Platform
|
||||
|
||||
Optionally, you can provide the location of an application credential JSON file with the following argument:
|
||||
|
||||
```console
|
||||
prowler gcp --credentials-file path
|
||||
```
|
||||
|
||||
## Azure
|
||||
|
||||
With Azure you need to specify which auth method is going to be used:
|
||||
|
||||
23
SECURITY.md
Normal file
@@ -0,0 +1,23 @@
|
||||
# Security Policy
|
||||
|
||||
## Software Security
|
||||
As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (FTR)](https://aws.amazon.com/partners/foundational-technical-review/) and we use the following tools and automation to make sure our code is secure and dependencies up-to-dated:
|
||||
|
||||
- `bandit` for code security review.
|
||||
- `safety` and `dependabot` for dependencies.
|
||||
- `hadolint` and `dockle` for our containers security.
|
||||
- `snyk` in Docker Hub.
|
||||
- `clair` in Amazon ECR.
|
||||
- `vulture`, `flake8`, `black` and `pylint` for formatting and best practices.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to help@prowler.pro.
|
||||
|
||||
The information you share with Verica as part of this process is kept confidential within Verica and the Prowler team. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
|
||||
|
||||
We will review the submitted report, and assign it a tracking number. We will then respond to you, acknowledging receipt of the report, and outline the next steps in the process.
|
||||
|
||||
You will receive a non-automated response to your initial contact within 24 hours, confirming receipt of your reported vulnerability.
|
||||
|
||||
We will coordinate public notification of any validated vulnerability with you. Where possible, we prefer that our respective public disclosures be posted simultaneously.
|
||||
@@ -1,43 +0,0 @@
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"CheckTitle": "Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to SSH port 22.",
|
||||
"CheckType": "Data Protection",
|
||||
"ServiceName": "ec2",
|
||||
"SubServiceName": "securitygroup",
|
||||
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
|
||||
"Severity": "low",
|
||||
"ResourceType": "AwsEc2SecurityGroup",
|
||||
"Description": "Extended Description",
|
||||
"Risk": "If Security groups are not properly configured the attack surface is increased.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "cli command or URL to the cli command location.",
|
||||
"NativeIaC": "code or URL to the code location.",
|
||||
"Other": "cli command or URL to the cli command location.",
|
||||
"Terraform": "code or URL to the code location."
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Use a Zero Trust approach. Narrow ingress traffic as much as possible. Consider north-south as well as east-west traffic.",
|
||||
"Url": "https://docs.aws.amazon.com/vpc/latest/userguide/vpc-security-best-practices.html"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"cat1",
|
||||
"cat2"
|
||||
],
|
||||
"Tags": {
|
||||
"Tag1Key": "value",
|
||||
"Tag2Key": "value"
|
||||
},
|
||||
"DependsOn": [
|
||||
"othercheck1",
|
||||
"othercheck2"
|
||||
],
|
||||
"RelatedTo": [
|
||||
"othercheck3",
|
||||
"othercheck4"
|
||||
],
|
||||
"Notes": "additional information"
|
||||
}
|
||||
398
contrib/codebuild/codebuild-prowlerv3-audit-account-cfn.yaml
Normal file
@@ -0,0 +1,398 @@
|
||||
---
|
||||
AWSTemplateFormatVersion: 2010-09-09
|
||||
Description: Creates a CodeBuild project to audit an AWS account with Prowler Version 2 and stores the html report in a S3 bucket. This will run onece at the beginning and on a schedule afterwards. Partial contribution from https://github.com/stevecjones
|
||||
Parameters:
|
||||
ServiceName:
|
||||
Description: 'Specifies the service name used within component naming'
|
||||
Type: String
|
||||
Default: 'prowler'
|
||||
|
||||
LogsRetentionInDays:
|
||||
Description: 'Specifies the number of days you want to retain CodeBuild run log events in the specified log group. Junit reports are kept for 30 days, HTML reports in S3 are not deleted'
|
||||
Type: Number
|
||||
Default: 3
|
||||
AllowedValues: [1, 3, 5, 7, 14, 30, 60, 90, 180, 365]
|
||||
|
||||
ProwlerOptions:
|
||||
Description: 'Options to pass to Prowler command, use -f to filter specific regions, -c for specific checks, -s for specific services, for SecurityHub integration use "-f shub_region -S", for more options see -h. For a complete assessment leave this empty.'
|
||||
Type: String
|
||||
# Prowler command below runs a set of checks, configure it base on your needs, no options will run all regions all checks.
|
||||
Default: -f eu-west-1 -s s3 iam ec2
|
||||
|
||||
ProwlerScheduler:
|
||||
Description: The time when Prowler will run in cron format. Default is daily at 22:00h or 10PM 'cron(0 22 * * ? *)', for every 5 hours also works 'rate(5 hours)'. More info here https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html.
|
||||
Type: String
|
||||
Default: 'cron(0 22 * * ? *)'
|
||||
|
||||
Resources:
|
||||
CodeBuildStartBuild:
|
||||
Type: 'Custom::CodeBuildStartBuild'
|
||||
DependsOn:
|
||||
- CodeBuildLogPolicy
|
||||
- CodeBuildStartLogPolicy
|
||||
Properties:
|
||||
Build: !Ref ProwlerCodeBuild
|
||||
ServiceToken: !GetAtt CodeBuildStartBuildLambda.Arn
|
||||
|
||||
CodeBuildStartBuildLambdaRole:
|
||||
Type: 'AWS::IAM::Role'
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Principal:
|
||||
Service: !Sub lambda.${AWS::URLSuffix}
|
||||
Action: 'sts:AssumeRole'
|
||||
Description: !Sub 'DO NOT DELETE - Used by Lambda. Created by CloudFormation Stack ${AWS::StackId}'
|
||||
Policies:
|
||||
- PolicyName: StartBuildInline
|
||||
PolicyDocument:
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action: 'codebuild:StartBuild'
|
||||
Resource: !GetAtt ProwlerCodeBuild.Arn
|
||||
|
||||
CodeBuildStartBuildLambda:
|
||||
Type: 'AWS::Lambda::Function'
|
||||
Metadata:
|
||||
cfn_nag:
|
||||
rules_to_suppress:
|
||||
- id: W58
|
||||
reason: 'This Lambda has permissions to write Logs'
|
||||
- id: W89
|
||||
reason: 'VPC is not needed'
|
||||
- id: W92
|
||||
reason: 'ReservedConcurrentExecutions not needed'
|
||||
Properties:
|
||||
Handler: index.lambda_handler
|
||||
MemorySize: 128
|
||||
Role: !Sub ${CodeBuildStartBuildLambdaRole.Arn}
|
||||
Timeout: 120
|
||||
Runtime: python3.9
|
||||
Code:
|
||||
ZipFile: |
|
||||
import boto3
|
||||
import cfnresponse
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
def lambda_handler(event,context):
|
||||
props = event['ResourceProperties']
|
||||
codebuild_client = boto3.client('codebuild')
|
||||
|
||||
if (event['RequestType'] == 'Create' or event['RequestType'] == 'Update'):
|
||||
try:
|
||||
response = codebuild_client.start_build(projectName=props['Build'])
|
||||
print(response)
|
||||
print("Respond: SUCCESS")
|
||||
cfnresponse.send(event, context, cfnresponse.SUCCESS, {})
|
||||
except Exception as ex:
|
||||
print(ex.response['Error']['Message'])
|
||||
cfnresponse.send(event, context, cfnresponse.FAILED, ex.response)
|
||||
else:
|
||||
cfnresponse.send(event, context, cfnresponse.SUCCESS, {})
|
||||
|
||||
CodeBuildStartLogGroup:
|
||||
Type: 'AWS::Logs::LogGroup'
|
||||
DeletionPolicy: Delete
|
||||
UpdateReplacePolicy: Delete
|
||||
Metadata:
|
||||
cfn_nag:
|
||||
rules_to_suppress:
|
||||
- id: W84
|
||||
reason: 'KMS encryption is not needed.'
|
||||
Properties:
|
||||
LogGroupName: !Sub '/aws/lambda/${CodeBuildStartBuildLambda}'
|
||||
RetentionInDays: !Ref LogsRetentionInDays
|
||||
|
||||
CodeBuildStartLogPolicy:
|
||||
Type: AWS::IAM::Policy
|
||||
Properties:
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action:
|
||||
- logs:CreateLogStream
|
||||
- logs:PutLogEvents
|
||||
Effect: Allow
|
||||
Resource: !GetAtt CodeBuildStartLogGroup.Arn
|
||||
PolicyName: LogGroup
|
||||
Roles:
|
||||
- !Ref CodeBuildStartBuildLambdaRole
|
||||
|
||||
ArtifactBucket:
|
||||
Type: AWS::S3::Bucket
|
||||
Metadata:
|
||||
cfn_nag:
|
||||
rules_to_suppress:
|
||||
- id: W35
|
||||
reason: 'S3 Access Logging is not needed'
|
||||
Properties:
|
||||
Tags:
|
||||
- Key: Name
|
||||
Value: !Sub '${ServiceName}-${AWS::AccountId}-S3-Prowler-${AWS::StackName}'
|
||||
BucketName: !Sub '${ServiceName}-reports-${AWS::Region}-prowler-${AWS::AccountId}'
|
||||
AccessControl: LogDeliveryWrite
|
||||
VersioningConfiguration:
|
||||
Status: Enabled
|
||||
BucketEncryption:
|
||||
ServerSideEncryptionConfiguration:
|
||||
- ServerSideEncryptionByDefault:
|
||||
SSEAlgorithm: AES256
|
||||
PublicAccessBlockConfiguration:
|
||||
BlockPublicAcls: true
|
||||
BlockPublicPolicy: true
|
||||
IgnorePublicAcls: true
|
||||
RestrictPublicBuckets: true
|
||||
|
||||
ArtifactBucketPolicy:
|
||||
Type: AWS::S3::BucketPolicy
|
||||
Properties:
|
||||
Bucket: !Ref ArtifactBucket
|
||||
PolicyDocument:
|
||||
Id: Content
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action: '*'
|
||||
Condition:
|
||||
Bool:
|
||||
aws:SecureTransport: false
|
||||
Effect: Deny
|
||||
Principal: '*'
|
||||
Resource: !Sub '${ArtifactBucket.Arn}/*'
|
||||
Sid: S3ForceSSL
|
||||
- Action: 's3:PutObject'
|
||||
Condition:
|
||||
'Null':
|
||||
s3:x-amz-server-side-encryption: true
|
||||
Effect: Deny
|
||||
Principal: '*'
|
||||
Resource: !Sub '${ArtifactBucket.Arn}/*'
|
||||
Sid: DenyUnEncryptedObjectUploads
|
||||
|
||||
CodeBuildServiceRole:
|
||||
Type: AWS::IAM::Role
|
||||
Metadata:
|
||||
cfn_nag:
|
||||
rules_to_suppress:
|
||||
- id: W11
|
||||
reason: 'Role complies with the least privilege principle.'
|
||||
Properties:
|
||||
Description: !Sub 'DO NOT DELETE - Used by CodeBuild. Created by CloudFormation Stack ${AWS::StackId}'
|
||||
ManagedPolicyArns:
|
||||
- !Sub 'arn:${AWS::Partition}:iam::aws:policy/job-function/SupportUser'
|
||||
- !Sub 'arn:${AWS::Partition}:iam::aws:policy/job-function/ViewOnlyAccess'
|
||||
- !Sub 'arn:${AWS::Partition}:iam::aws:policy/SecurityAudit'
|
||||
AssumeRolePolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action: 'sts:AssumeRole'
|
||||
Effect: Allow
|
||||
Principal:
|
||||
Service: !Sub codebuild.${AWS::URLSuffix}
|
||||
Policies:
|
||||
- PolicyName: S3
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action:
|
||||
- s3:PutObject
|
||||
- s3:GetObject
|
||||
- s3:GetObjectVersion
|
||||
- s3:GetBucketAcl
|
||||
- s3:GetBucketLocation
|
||||
Effect: Allow
|
||||
Resource: !Sub '${ArtifactBucket.Arn}/*'
|
||||
- PolicyName: ProwlerAdditions
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action:
|
||||
- account:Get*
|
||||
- appstream:Describe*
|
||||
- codeartifact:List*
|
||||
- codebuild:BatchGet*
|
||||
- ds:Get*
|
||||
- ds:Describe*
|
||||
- ds:List*
|
||||
- ec2:GetEbsEncryptionByDefault
|
||||
- ecr:Describe*
|
||||
- elasticfilesystem:DescribeBackupPolicy
|
||||
- glue:GetConnections
|
||||
- glue:GetSecurityConfiguration*
|
||||
- glue:SearchTables
|
||||
- lambda:GetFunction*
|
||||
- macie2:GetMacieSession
|
||||
- s3:GetAccountPublicAccessBlock
|
||||
- s3:GetPublicAccessBlock
|
||||
- shield:DescribeProtection
|
||||
- shield:GetSubscriptionState
|
||||
- securityhub:BatchImportFindings
|
||||
- securityhub:GetFindings
|
||||
- ssm:GetDocument
|
||||
- support:Describe*
|
||||
- tag:GetTagKeys
|
||||
Effect: Allow
|
||||
Resource: '*'
|
||||
- PolicyName: ProwlerAdditionsApiGW
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action:
|
||||
- apigateway:GET
|
||||
Effect: Allow
|
||||
Resource: 'arn:aws:apigateway:*::/restapis/*'
|
||||
- PolicyName: CodeBuild
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action:
|
||||
- codebuild:CreateReportGroup
|
||||
- codebuild:CreateReport
|
||||
- codebuild:UpdateReport
|
||||
- codebuild:BatchPutTestCases
|
||||
- codebuild:BatchPutCodeCoverages
|
||||
Effect: Allow
|
||||
Resource: !Sub 'arn:${AWS::Partition}:codebuild:${AWS::Region}:${AWS::AccountId}:report-group/*'
|
||||
- PolicyName: SecurityHubBatchImportFindings
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action: securityhub:BatchImportFindings
|
||||
Effect: Allow
|
||||
Resource: !Sub 'arn:${AWS::Partition}:securityhub:${AWS::Region}::product/prowler/prowler'
|
||||
|
||||
CodeBuildLogPolicy:
|
||||
Type: AWS::IAM::Policy
|
||||
Properties:
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action:
|
||||
- logs:CreateLogStream
|
||||
- logs:PutLogEvents
|
||||
Effect: Allow
|
||||
Resource: !GetAtt ProwlerLogGroup.Arn
|
||||
PolicyName: LogGroup
|
||||
Roles:
|
||||
- !Ref CodeBuildServiceRole
|
||||
|
||||
CodeBuildAssumePolicy:
|
||||
Type: AWS::IAM::Policy
|
||||
Properties:
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action: 'sts:AssumeRole'
|
||||
Effect: Allow
|
||||
Resource: !GetAtt CodeBuildServiceRole.Arn
|
||||
PolicyName: AssumeRole
|
||||
Roles:
|
||||
- !Ref CodeBuildServiceRole
|
||||
|
||||
ProwlerCodeBuild:
|
||||
Type: AWS::CodeBuild::Project
|
||||
Metadata:
|
||||
cfn_nag:
|
||||
rules_to_suppress:
|
||||
- id: W32
|
||||
reason: 'KMS encryption is not needed.'
|
||||
Properties:
|
||||
Artifacts:
|
||||
Type: NO_ARTIFACTS
|
||||
ConcurrentBuildLimit: 1
|
||||
Source:
|
||||
Type: NO_SOURCE
|
||||
BuildSpec: |
|
||||
version: 0.2
|
||||
phases:
|
||||
install:
|
||||
runtime-versions:
|
||||
python: 3.9
|
||||
commands:
|
||||
- echo "Installing Prowler..."
|
||||
- pip3 install prowler
|
||||
build:
|
||||
commands:
|
||||
- echo "Running Prowler as prowler $PROWLER_OPTIONS"
|
||||
- prowler $PROWLER_OPTIONS
|
||||
post_build:
|
||||
commands:
|
||||
- echo "Uploading reports to S3..."
|
||||
- aws s3 cp --sse AES256 output/ s3://$BUCKET_REPORT/ --recursive
|
||||
- echo "Done!"
|
||||
# Currently not supported in Version 3
|
||||
# reports:
|
||||
# prowler:
|
||||
# files:
|
||||
# - '**/*'
|
||||
# base-directory: 'junit-reports'
|
||||
# file-format: JunitXml
|
||||
Environment:
|
||||
# AWS CodeBuild free tier includes 100 build minutes of BUILD_GENERAL1_SMALL per month.
|
||||
# BUILD_GENERAL1_SMALL: Use up to 3 GB memory and 2 vCPUs for builds. $0.005/minute.
|
||||
# BUILD_GENERAL1_MEDIUM: Use up to 7 GB memory and 4 vCPUs for builds. $0.01/minute.
|
||||
# BUILD_GENERAL1_LARGE: Use up to 15 GB memory and 8 vCPUs for builds. $0.02/minute.
|
||||
# BUILD_GENERAL1_2XLARGE: Use up to 144 GB memory and 72 vCPUs for builds. $0.20/minute.
|
||||
ComputeType: "BUILD_GENERAL1_SMALL"
|
||||
Image: "aws/codebuild/amazonlinux2-x86_64-standard:3.0"
|
||||
Type: "LINUX_CONTAINER"
|
||||
EnvironmentVariables:
|
||||
- Name: BUCKET_REPORT
|
||||
Value: !Ref ArtifactBucket
|
||||
Type: PLAINTEXT
|
||||
- Name: PROWLER_OPTIONS
|
||||
Value: !Ref ProwlerOptions
|
||||
Type: PLAINTEXT
|
||||
Description: Run Prowler assessment
|
||||
ServiceRole: !GetAtt CodeBuildServiceRole.Arn
|
||||
TimeoutInMinutes: 300
|
||||
|
||||
ProwlerLogGroup:
|
||||
Type: 'AWS::Logs::LogGroup'
|
||||
DeletionPolicy: Delete
|
||||
UpdateReplacePolicy: Delete
|
||||
Metadata:
|
||||
cfn_nag:
|
||||
rules_to_suppress:
|
||||
- id: W84
|
||||
reason: 'KMS encryption is not needed.'
|
||||
Properties:
|
||||
LogGroupName: !Sub '/aws/codebuild/${ProwlerCodeBuild}'
|
||||
RetentionInDays: !Ref LogsRetentionInDays
|
||||
|
||||
EventBridgeServiceRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
Description: !Sub 'DO NOT DELETE - Used by EventBridge. Created by CloudFormation Stack ${AWS::StackId}'
|
||||
AssumeRolePolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action: 'sts:AssumeRole'
|
||||
Effect: Allow
|
||||
Principal:
|
||||
Service: !Sub events.${AWS::URLSuffix}
|
||||
Policies:
|
||||
- PolicyName: CodeBuild
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action: 'codebuild:StartBuild'
|
||||
Resource: !GetAtt ProwlerCodeBuild.Arn
|
||||
|
||||
ProwlerSchedule:
|
||||
Type: 'AWS::Events::Rule'
|
||||
Properties:
|
||||
Description: A schedule to trigger Prowler in CodeBuild
|
||||
ScheduleExpression: !Ref ProwlerScheduler
|
||||
State: ENABLED
|
||||
Targets:
|
||||
- Arn: !GetAtt ProwlerCodeBuild.Arn
|
||||
Id: ProwlerSchedule
|
||||
RoleArn: !GetAtt EventBridgeServiceRole.Arn
|
||||
|
||||
Outputs:
|
||||
ArtifactBucketName:
|
||||
Description: Artifact Bucket Name
|
||||
Value: !Ref ArtifactBucket
|
||||
@@ -5,20 +5,20 @@ hide:
|
||||
# About
|
||||
|
||||
## Author
|
||||
Prowler was created by **Toni de la Fuente** in 2016.
|
||||
Prowler was created by **Toni de la Fuente** in 2016.
|
||||
|
||||
| <br>[](https://twitter.com/toniblyx) [](https://twitter.com/prowlercloud)|
|
||||
| <br>[](https://twitter.com/toniblyx) [](https://twitter.com/prowlercloud)|
|
||||
|:--:|
|
||||
| <b>Toni de la Fuente </b>|
|
||||
|
||||
## Maintainers
|
||||
Prowler is maintained by the Engineers of the **Prowler Team** :
|
||||
|
||||
| [](https://twitter.com/NachoRivCor) | [](https://twitter.com/sergargar1) |[](https://twitter.com/jfagoagas) |
|
||||
| [](https://twitter.com/NachoRivCor) | [](https://twitter.com/sergargar1) |[](https://twitter.com/jfagoagas) |
|
||||
|:--:|:--:|:--:
|
||||
| <b>Nacho Rivera</b>| <b>Sergio Garcia</b>| <b>Pepe Fagoaga</b>|
|
||||
|
||||
## License
|
||||
|
||||
Prowler is licensed as **Apache License 2.0** as specified in each file. You may obtain a copy of the License at
|
||||
<http://www.apache.org/licenses/LICENSE-2.0>
|
||||
<http://www.apache.org/licenses/LICENSE-2.0>
|
||||
|
||||
@@ -26,9 +26,27 @@ Those credentials must be associated to a user or role with proper permissions t
|
||||
- arn:aws:iam::aws:policy/SecurityAudit
|
||||
- arn:aws:iam::aws:policy/job-function/ViewOnlyAccess
|
||||
|
||||
> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/iam/prowler-additions-policy.json) to the role you are using.
|
||||
> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
|
||||
|
||||
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/iam/prowler-security-hub.json).
|
||||
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
|
||||
|
||||
## Google Cloud
|
||||
|
||||
### GCP Authentication
|
||||
|
||||
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
|
||||
|
||||
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
|
||||
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
|
||||
|
||||
- Viewer
|
||||
- Security Reviewer
|
||||
- Stackdriver Account Viewer
|
||||
|
||||
> `prowler` will scan the project associated with the credentials.
|
||||
|
||||
## Azure
|
||||
|
||||
|
||||
BIN
docs/img/output-html.png
Normal file
|
After Width: | Height: | Size: 631 KiB |
BIN
docs/img/quick-inventory.jpg
Normal file
|
After Width: | Height: | Size: 320 KiB |
|
Before Width: | Height: | Size: 220 KiB |
@@ -5,7 +5,7 @@
|
||||
|
||||
# Prowler Documentation
|
||||
|
||||
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
|
||||
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
|
||||
|
||||
For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cloud/prowler/tree/2.12.0) to the branch and its README.md.
|
||||
|
||||
@@ -16,7 +16,7 @@ For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cl
|
||||
|
||||
## About Prowler
|
||||
|
||||
**Prowler** is an Open Source security tool to perform AWS and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
|
||||
|
||||
It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
|
||||
|
||||
@@ -40,12 +40,12 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
|
||||
|
||||
* `Python >= 3.9`
|
||||
* `Python pip >= 3.9`
|
||||
* AWS and/or Azure credentials
|
||||
* AWS, GCP and/or Azure credentials
|
||||
|
||||
_Commands_:
|
||||
|
||||
``` bash
|
||||
pip install prowler-cloud
|
||||
pip install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
@@ -54,7 +54,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
|
||||
_Requirements_:
|
||||
|
||||
* Have `docker` installed: https://docs.docker.com/get-docker/.
|
||||
* AWS and/or Azure credentials
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* In the command below, change `-v` to your local directory path in order to access the reports.
|
||||
|
||||
_Commands_:
|
||||
@@ -71,7 +71,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
|
||||
|
||||
_Requirements for Ubuntu 20.04.3 LTS_:
|
||||
|
||||
* AWS and/or Azure credentials
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* Install python 3.9 with: `sudo apt-get install python3.9`
|
||||
* Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
|
||||
* Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`
|
||||
@@ -82,27 +82,58 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
pip3.9 install prowler-cloud
|
||||
pip3.9 install prowler
|
||||
export PATH=$PATH:/home/$HOME/.local/bin/
|
||||
prowler -v
|
||||
```
|
||||
|
||||
=== "GitHub"
|
||||
|
||||
_Requirements for Developers_:
|
||||
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)
|
||||
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
poetry shell
|
||||
poetry install
|
||||
python prowler.py -v
|
||||
```
|
||||
|
||||
=== "Amazon Linux 2"
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* AWS and/or Azure credentials
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* Latest Amazon Linux 2 should come with Python 3.9 already installed however it may need pip. Install Python pip 3.9 with: `sudo dnf install -y python3-pip`.
|
||||
* Make sure setuptools for python is already installed with: `pip3 install setuptools`
|
||||
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
pip3.9 install prowler-cloud
|
||||
pip3.9 install prowler
|
||||
export PATH=$PATH:/home/$HOME/.local/bin/
|
||||
prowler -v
|
||||
```
|
||||
|
||||
=== "Brew"
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* `Brew` installed in your Mac or Linux
|
||||
* AWS, GCP and/or Azure credentials
|
||||
|
||||
_Commands_:
|
||||
|
||||
``` bash
|
||||
brew install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
=== "AWS CloudShell"
|
||||
|
||||
Prowler can be easily executed in AWS CloudShell but it has some prerequisites to be able to do so. AWS CloudShell is a container running with `Amazon Linux release 2 (Karoo)` that comes with Python 3.7; since Prowler requires Python >= 3.9 we need to first install a newer version of Python. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
|
||||
@@ -118,13 +149,13 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
|
||||
./configure --enable-optimizations
|
||||
sudo make altinstall
|
||||
python3.9 --version
|
||||
cd
|
||||
cd
|
||||
```
|
||||
_Commands_:
|
||||
|
||||
* Once Python 3.9 is available we can install Prowler from pip:
|
||||
```
|
||||
pip3.9 install prowler-cloud
|
||||
pip3.9 install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
@@ -139,7 +170,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
pip install prowler-cloud
|
||||
pip install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
@@ -154,7 +185,7 @@ The available versions of Prowler are the following:
|
||||
The container images are available here:
|
||||
|
||||
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
|
||||
- [AWS Public ECR](https://gallery.ecr.aws/o4g1s5r6/prowler)
|
||||
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
|
||||
|
||||
## High level architecture
|
||||
|
||||
@@ -163,7 +194,7 @@ You can run Prowler from your workstation, an EC2 instance, Fargate or any other
|
||||

|
||||
## Basic Usage
|
||||
|
||||
To run Prowler, you will need to specify the provider (e.g aws or azure):
|
||||
To run Prowler, you will need to specify the provider (e.g aws, gcp or azure):
|
||||
> If no provider specified, AWS will be used for backward compatibility with most of v2 options.
|
||||
|
||||
```console
|
||||
@@ -172,7 +203,7 @@ prowler <provider>
|
||||

|
||||
> Running the `prowler` command without options will use your environment variable credentials, see [Requirements](getting-started/requirements/) section to review the credentials settings.
|
||||
|
||||
If you miss the former output you can use `--verbose` but Prowler v3 is smoking fast so you won't see much ;)
|
||||
If you miss the former output you can use `--verbose` but Prowler v3 is smoking fast, so you won't see much ;)
|
||||
|
||||
By default, Prowler will generate a CSV, JSON and HTML reports, however you can generate a JSON-ASFF (used by AWS Security Hub) report with `-M` or `--output-modes`:
|
||||
|
||||
@@ -195,6 +226,7 @@ For executing specific checks or services you can use options `-c`/`checks` or `
|
||||
```console
|
||||
prowler azure --checks storage_blob_public_access_level_is_disabled
|
||||
prowler aws --services s3 ec2
|
||||
prowler gcp --services iam compute
|
||||
```
|
||||
|
||||
Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:
|
||||
@@ -202,6 +234,7 @@ Also, checks and services can be excluded with options `-e`/`--excluded-checks`
|
||||
```console
|
||||
prowler aws --excluded-checks s3_bucket_public_access
|
||||
prowler azure --excluded-services defender iam
|
||||
prowler gcp --excluded-services kms
|
||||
```
|
||||
|
||||
More options and execution methods that will save you time in [Miscellaneous](tutorials/misc.md).
|
||||
@@ -221,6 +254,14 @@ prowler aws --profile custom-profile -f us-east-1 eu-south-2
|
||||
```
|
||||
> By default, `prowler` will scan all AWS regions.
|
||||
|
||||
### Google Cloud
|
||||
|
||||
Optionally, you can provide the location of an application credential JSON file with the following argument:
|
||||
|
||||
```console
|
||||
prowler gcp --credentials-file path
|
||||
```
|
||||
|
||||
### Azure
|
||||
|
||||
With Azure you need to specify which auth method is going to be used:
|
||||
|
||||
24
docs/security.md
Normal file
@@ -0,0 +1,24 @@
|
||||
# Security
|
||||
|
||||
## Software Security
|
||||
|
||||
As an **AWS Partner**, we have passed the [AWS Foundation Technical Review (FTR)](https://aws.amazon.com/partners/foundational-technical-review/) and we use the following tools and automation to make sure our code is secure and our dependencies are up to date:
|
||||
|
||||
- `bandit` for code security review.
|
||||
- `safety` and `dependabot` for dependencies.
|
||||
- `hadolint` and `dockle` for our containers security.
|
||||
- `snyk` in Docker Hub.
|
||||
- `clair` in Amazon ECR.
|
||||
- `vulture`, `flake8`, `black` and `pylint` for formatting and best practices.
|
||||
|
||||
## Reporting Vulnerabilities
|
||||
|
||||
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the ProwlerPro service, please submit the information by contacting help@prowler.pro.
|
||||
|
||||
The information you share with Verica as part of this process is kept confidential within Verica and the Prowler team. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
|
||||
|
||||
We will review the submitted report, and assign it a tracking number. We will then respond to you, acknowledging receipt of the report, and outline the next steps in the process.
|
||||
|
||||
You will receive a non-automated response to your initial contact within 24 hours, confirming receipt of your reported vulnerability.
|
||||
|
||||
We will coordinate public notification of any validated vulnerability with you. Where possible, we prefer that our respective public disclosures be posted simultaneously.
|
||||
@@ -1,8 +1,14 @@
|
||||
# Troubleshooting
|
||||
|
||||
- Running `prowler` I get `[File: utils.py:15] [Module: utils] CRITICAL: path/redacted: OSError[13]`:
|
||||
- **Running `prowler` I get `[File: utils.py:15] [Module: utils] CRITICAL: path/redacted: OSError[13]`**:
|
||||
|
||||
That is an error related to file descriptors or opened files allowed by your operating system, with `ulimit -n 1000` you solve the issue. We have seen this issue in some macOS Ventura.
|
||||
That is an error related to file descriptors or opened files allowed by your operating system.
|
||||
|
||||
In macOS Ventura, the default value for the `file descriptors` is `256`. With the following command `ulimit -n 1000` you'll increase that value and solve the issue.
|
||||
|
||||
If you have a different OS and you are experiencing the same, please increase the value of your `file descriptors`. You can check it running `ulimit -a | grep "file descriptors"`.
|
||||
|
||||
This error is also related with a lack of system requirements. To improve performance, Prowler stores information in memory so it may need to be run in a system with more than 1GB of memory.
|
||||
|
||||
|
||||
See section [Logging](/tutorials/logging/) for further information or [conctact us](/contact/).
|
||||
See section [Logging](/tutorials/logging/) for further information or [contact us](/contact/).
|
||||
|
||||
@@ -8,34 +8,45 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b
|
||||
## Allowlist Yaml File Syntax
|
||||
|
||||
### Account, Check and/or Region can be * to apply for all the cases
|
||||
### Resources is a list that can have either Regex or Keywords:
|
||||
### Resources is a list that can have either Regex or Keywords
|
||||
### Tags is an optional list containing tuples of 'key=value'
|
||||
########################### ALLOWLIST EXAMPLE ###########################
|
||||
Allowlist:
|
||||
Accounts:
|
||||
Accounts:
|
||||
"123456789012":
|
||||
Checks:
|
||||
Checks:
|
||||
"iam_user_hardware_mfa_enabled":
|
||||
Regions:
|
||||
Regions:
|
||||
- "us-east-1"
|
||||
Resources:
|
||||
Resources:
|
||||
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
|
||||
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
|
||||
"*":
|
||||
Regions:
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "test" # Will ignore every resource containing the string "test" in every account and region
|
||||
Resources:
|
||||
- "test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
|
||||
Tags:
|
||||
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
|
||||
- "project=test"
|
||||
|
||||
"*":
|
||||
Checks:
|
||||
Checks:
|
||||
"s3_bucket_object_versioning":
|
||||
Regions:
|
||||
Regions:
|
||||
- "eu-west-1"
|
||||
- "us-east-1"
|
||||
Resources:
|
||||
Resources:
|
||||
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
|
||||
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
|
||||
- "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
|
||||
"*":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "*"
|
||||
Tags:
|
||||
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
|
||||
|
||||
|
||||
## Supported Allowlist Locations
|
||||
@@ -63,14 +74,47 @@ prowler aws -w arn:aws:dynamodb:<region_name>:<account_id>:table/<table_name>
|
||||
```
|
||||
|
||||
1. The DynamoDB Table must have the following String keys:
|
||||
<img src="/img/allowlist-keys.png"/>
|
||||
<img src="../img/allowlist-keys.png"/>
|
||||
|
||||
- The Allowlist Table must have the following columns:
|
||||
- Accounts (String): This field can contain either an Account ID or an `*` (which applies to all the accounts that use this table as an allowlist).
|
||||
- Checks (String): This field can contain either a Prowler Check Name or an `*` (which applies to all the scanned checks).
|
||||
    - Regions (List): This field contains a list of regions where this allowlist rule is applied (it can also contain an `*` to apply to all scanned regions).
|
||||
    - Resources (List): This field contains a list of regex expressions that apply to the resources to be allowlisted.
|
||||
    - Tags (List): -Optional- This field contains a list of tuples in the form of 'key=value' that apply to the resource tags to be allowlisted.
|
||||
|
||||
<img src="/img/allowlist-row.png"/>
|
||||
<img src="../img/allowlist-row.png"/>
|
||||
|
||||
> Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table.
|
||||
|
||||
### AWS Lambda ARN
|
||||
|
||||
You will need to pass the AWS Lambda Function ARN:
|
||||
|
||||
```
|
||||
prowler aws -w arn:aws:lambda:REGION:ACCOUNT_ID:function:FUNCTION_NAME
|
||||
```
|
||||
|
||||
Make sure that the credentials that Prowler uses can invoke the Lambda Function:
|
||||
|
||||
```
|
||||
- PolicyName: GetAllowList
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
- Action: 'lambda:InvokeFunction'
|
||||
Effect: Allow
|
||||
Resource: arn:aws:lambda:REGION:ACCOUNT_ID:function:FUNCTION_NAME
|
||||
```
|
||||
|
||||
The Lambda Function can then generate an Allowlist dynamically. Here is the code of an example Python Lambda Function that
|
||||
generates an Allowlist:
|
||||
|
||||
```
|
||||
def handler(event, context):
|
||||
checks = {}
|
||||
checks["vpc_flow_logs_enabled"] = { "Regions": [ "*" ], "Resources": [ "" ], Optional("Tags"): [ "key:value" ] }
|
||||
|
||||
al = { "Allowlist": { "Accounts": { "*": { "Checks": checks } } } }
|
||||
return al
|
||||
```
|
||||
|
||||
31
docs/tutorials/aws/boto3-configuration.md
Normal file
@@ -0,0 +1,31 @@
|
||||
# Boto3 Retrier Configuration
|
||||
|
||||
Prowler's AWS Provider uses the Boto3 [Standard](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html) retry mode to assist in retrying client calls to AWS services when these kinds of errors or exceptions are experienced. This mode includes the following behaviours:
|
||||
- A default value of 3 for maximum retry attempts. This can be overwritten with the `--aws-retries-max-attempts 5` argument.
|
||||
- Retry attempts for an expanded list of errors/exceptions:
|
||||
```
|
||||
# Transient errors/exceptions
|
||||
RequestTimeout
|
||||
RequestTimeoutException
|
||||
PriorRequestNotComplete
|
||||
ConnectionError
|
||||
HTTPClientError
|
||||
|
||||
# Service-side throttling/limit errors and exceptions
|
||||
Throttling
|
||||
ThrottlingException
|
||||
ThrottledException
|
||||
RequestThrottledException
|
||||
TooManyRequestsException
|
||||
ProvisionedThroughputExceededException
|
||||
TransactionInProgressException
|
||||
RequestLimitExceeded
|
||||
BandwidthLimitExceeded
|
||||
LimitExceededException
|
||||
RequestThrottled
|
||||
SlowDown
|
||||
EC2ThrottledException
|
||||
```
|
||||
- Retry attempts on nondescriptive, transient error codes. Specifically, these HTTP status codes: 500, 502, 503, 504.
|
||||
|
||||
- Any retry attempt will include an exponential backoff by a base factor of 2 for a maximum backoff time of 20 seconds.
|
||||
@@ -15,7 +15,7 @@ cd
|
||||
```
|
||||
- Once Python 3.9 is available we can install Prowler from pip:
|
||||
```
|
||||
pip3.9 install prowler-cloud
|
||||
pip3.9 install prowler
|
||||
```
|
||||
- Now enjoy Prowler:
|
||||
```
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Scan Multiple AWS Accounts
|
||||
|
||||
Prowler can scan multiple accounts when it is ejecuted from one account that can assume a role in those given accounts to scan using [Assume Role feature](role-assumption.md) and [AWS Organizations integration feature](organizations.md).
|
||||
Prowler can scan multiple accounts when it is executed from one account that can assume a role in those given accounts to scan using [Assume Role feature](role-assumption.md) and [AWS Organizations integration feature](organizations.md).
|
||||
|
||||
|
||||
## Scan multiple specific accounts sequentially
|
||||
|
||||
9
docs/tutorials/aws/resource-arn-based-scan.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Resource ARNs based Scan
|
||||
|
||||
Prowler allows you to scan only the resources with specific AWS Resource ARNs. This can be done with the flag `--resource-arn` followed by one or more [Amazon Resource Names (ARNs)](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html) separated by space:
|
||||
|
||||
```
|
||||
prowler aws --resource-arn arn:aws:iam::012345678910:user/test arn:aws:ec2:us-east-1:123456789012:vpc/vpc-12345678
|
||||
```
|
||||
|
||||
This example will only scan the two resources with those ARNs.
|
||||
@@ -5,6 +5,13 @@ Prowler uses the AWS SDK (Boto3) underneath so it uses the same authentication m
|
||||
However, there are few ways to run Prowler against multiple accounts using IAM Assume Role feature depending on each use case:
|
||||
|
||||
1. You can just set up your custom profile inside `~/.aws/config` with all needed information about the role to assume then call it with `prowler aws -p/--profile your-custom-profile`.
|
||||
- An example profile that performs role-chaining is given below. The `credential_source` can either be set to `Environment`, `Ec2InstanceMetadata`, or `EcsContainer`.
|
||||
- Alternatively, you could use the `source_profile` instead of `credential_source` to specify a separate named profile that contains IAM user credentials with permission to assume the target the role. More information can be found [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html).
|
||||
```
|
||||
[profile crossaccountrole]
|
||||
role_arn = arn:aws:iam::234567890123:role/SomeRole
|
||||
credential_source = EcsContainer
|
||||
```
|
||||
|
||||
2. You can use `-R`/`--role <role_arn>` and Prowler will get those temporary credentials using `Boto3` and run against that given account.
|
||||
```sh
|
||||
@@ -20,6 +27,6 @@ prowler aws -T/--session-duration <seconds> -I/--external-id <external_id> -R ar
|
||||
|
||||
To create a role to be assumed in one or multiple accounts you can use either as CloudFormation Stack or StackSet the following [template](https://github.com/prowler-cloud/prowler/blob/master/permissions/create_role_to_assume_cfn.yaml) and adapt it.
|
||||
|
||||
> _NOTE 1 about Session Duration_: Depending on the mount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
|
||||
> _NOTE 1 about Session Duration_: Depending on the amount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
|
||||
|
||||
> _NOTE 2 about Session Duration_: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html).
|
||||
|
||||
@@ -13,18 +13,18 @@ Before sending findings to Prowler, you will need to perform next steps:
|
||||
- Using the AWS Management Console:
|
||||

|
||||
3. Allow Prowler to import its findings to AWS Security Hub by adding the policy below to the role or user running Prowler:
|
||||
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/iam/prowler-security-hub.json)
|
||||
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
|
||||
|
||||
Once it is enabled, it is as simple as running the command below (for all regions):
|
||||
|
||||
```sh
|
||||
./prowler aws -S
|
||||
prowler aws -S
|
||||
```
|
||||
|
||||
or for only one filtered region like eu-west-1:
|
||||
|
||||
```sh
|
||||
./prowler -S -f eu-west-1
|
||||
prowler -S -f eu-west-1
|
||||
```
|
||||
|
||||
> **Note 1**: It is recommended to send only fails to Security Hub and that is possible adding `-q` to the command.
|
||||
@@ -36,3 +36,12 @@ or for only one filtered region like eu-west-1:
|
||||
Once you run findings for first time you will be able to see Prowler findings in Findings section:
|
||||
|
||||

|
||||
|
||||
## Skip sending updates of findings to Security Hub
|
||||
|
||||
By default, Prowler archives all its findings in Security Hub that have not appeared in the last scan.
|
||||
You can skip this logic by using the option `--skip-sh-update` so Prowler will not archive older findings:
|
||||
|
||||
```sh
|
||||
prowler -S --skip-sh-update
|
||||
```
|
||||
|
||||
9
docs/tutorials/aws/tag-based-scan.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Tags-based Scan
|
||||
|
||||
Prowler allows you to scan only the resources that contain specific tags. This can be done with the flag `--resource-tags` followed by the tags `Key=Value` separated by space:
|
||||
|
||||
```
|
||||
prowler aws --resource-tags Environment=dev Project=prowler
|
||||
```
|
||||
|
||||
This example will only scan the resources that contain both tags.
|
||||
256
docs/tutorials/aws/v2_to_v3_checks_mapping.md
Normal file
@@ -0,0 +1,256 @@
|
||||
# Check mapping between Prowler v3 and v2
|
||||
|
||||
Prowler v3 comes with different identifiers but we maintained the same checks that were implemented in v2. The reason for this change is that in previous versions of Prowler, check names were mostly based on the CIS Benchmark for AWS. In v3 all checks are independent from any security framework and each has its own name and ID.
|
||||
|
||||
If you need more information about how the new compliance implementation works in Prowler v3, see the [Compliance](../../compliance/) section.
|
||||
|
||||
```
|
||||
checks_v3_to_v2_mapping = {
|
||||
"accessanalyzer_enabled_without_findings": "extra769",
|
||||
"account_maintain_current_contact_details": "check117",
|
||||
"account_security_contact_information_is_registered": "check118",
|
||||
"account_security_questions_are_registered_in_the_aws_account": "check115",
|
||||
"acm_certificates_expiration_check": "extra730",
|
||||
"acm_certificates_transparency_logs_enabled": "extra724",
|
||||
"apigateway_authorizers_enabled": "extra746",
|
||||
"apigateway_client_certificate_enabled": "extra743",
|
||||
"apigateway_endpoint_public": "extra745",
|
||||
"apigateway_logging_enabled": "extra722",
|
||||
"apigateway_waf_acl_attached": "extra744",
|
||||
"apigatewayv2_access_logging_enabled": "extra7156",
|
||||
"apigatewayv2_authorizers_enabled": "extra7157",
|
||||
"appstream_fleet_default_internet_access_disabled": "extra7193",
|
||||
"appstream_fleet_maximum_session_duration": "extra7190",
|
||||
"appstream_fleet_session_disconnect_timeout": "extra7191",
|
||||
"appstream_fleet_session_idle_disconnect_timeout": "extra7192",
|
||||
"autoscaling_find_secrets_ec2_launch_configuration": "extra775",
|
||||
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled": "extra720",
|
||||
"awslambda_function_no_secrets_in_code": "extra760",
|
||||
"awslambda_function_no_secrets_in_variables": "extra759",
|
||||
"awslambda_function_not_publicly_accessible": "extra798",
|
||||
"awslambda_function_url_cors_policy": "extra7180",
|
||||
"awslambda_function_url_public": "extra7179",
|
||||
"awslambda_function_using_supported_runtimes": "extra762",
|
||||
"cloudformation_outputs_find_secrets": "extra742",
|
||||
"cloudformation_stacks_termination_protection_enabled": "extra7154",
|
||||
"cloudfront_distributions_field_level_encryption_enabled": "extra767",
|
||||
"cloudfront_distributions_geo_restrictions_enabled": "extra732",
|
||||
"cloudfront_distributions_https_enabled": "extra738",
|
||||
"cloudfront_distributions_logging_enabled": "extra714",
|
||||
"cloudfront_distributions_using_deprecated_ssl_protocols": "extra791",
|
||||
"cloudfront_distributions_using_waf": "extra773",
|
||||
"cloudtrail_cloudwatch_logging_enabled": "check24",
|
||||
"cloudtrail_kms_encryption_enabled": "check27",
|
||||
"cloudtrail_log_file_validation_enabled": "check22",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled": "check26",
|
||||
"cloudtrail_logs_s3_bucket_is_not_publicly_accessible": "check23",
|
||||
"cloudtrail_multi_region_enabled": "check21",
|
||||
"cloudtrail_s3_dataevents_read_enabled": "extra7196",
|
||||
"cloudtrail_s3_dataevents_write_enabled": "extra725",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured": "check311",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured": "check312",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured": "check313",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured": "check314",
|
||||
"cloudwatch_cross_account_sharing_disabled": "extra7144",
|
||||
"cloudwatch_log_group_kms_encryption_enabled": "extra7164",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled": "extra7162",
|
||||
"cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled": "check39",
|
||||
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled": "check35",
|
||||
"cloudwatch_log_metric_filter_authentication_failures": "check36",
|
||||
"cloudwatch_log_metric_filter_aws_organizations_changes": "extra7197",
|
||||
"cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk": "check37",
|
||||
"cloudwatch_log_metric_filter_for_s3_bucket_policy_changes": "check38",
|
||||
"cloudwatch_log_metric_filter_policy_changes": "check34",
|
||||
"cloudwatch_log_metric_filter_root_usage": "check33",
|
||||
"cloudwatch_log_metric_filter_security_group_changes": "check310",
|
||||
"cloudwatch_log_metric_filter_sign_in_without_mfa": "check32",
|
||||
"cloudwatch_log_metric_filter_unauthorized_api_calls": "check31",
|
||||
"codeartifact_packages_external_public_publishing_disabled": "extra7195",
|
||||
"codebuild_project_older_90_days": "extra7174",
|
||||
"codebuild_project_user_controlled_buildspec": "extra7175",
|
||||
"config_recorder_all_regions_enabled": "check25",
|
||||
"directoryservice_directory_log_forwarding_enabled": "extra7181",
|
||||
"directoryservice_directory_monitor_notifications": "extra7182",
|
||||
"directoryservice_directory_snapshots_limit": "extra7184",
|
||||
"directoryservice_ldap_certificate_expiration": "extra7183",
|
||||
"directoryservice_radius_server_security_protocol": "extra7188",
|
||||
"directoryservice_supported_mfa_radius_enabled": "extra7189",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled": "extra7165",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled": "extra7128",
|
||||
"dynamodb_tables_pitr_enabled": "extra7151",
|
||||
"ec2_ami_public": "extra76",
|
||||
"ec2_ebs_default_encryption": "extra761",
|
||||
"ec2_ebs_public_snapshot": "extra72",
|
||||
"ec2_ebs_snapshots_encrypted": "extra740",
|
||||
"ec2_ebs_volume_encryption": "extra729",
|
||||
"ec2_elastic_ip_shodan": "extra7102",
|
||||
"ec2_elastic_ip_unassgined": "extra7146",
|
||||
"ec2_instance_imdsv2_enabled": "extra786",
|
||||
"ec2_instance_internet_facing_with_instance_profile": "extra770",
|
||||
"ec2_instance_managed_by_ssm": "extra7124",
|
||||
"ec2_instance_older_than_specific_days": "extra758",
|
||||
"ec2_instance_profile_attached": "check119",
|
||||
"ec2_instance_public_ip": "extra710",
|
||||
"ec2_instance_secrets_user_data": "extra741",
|
||||
"ec2_networkacl_allow_ingress_any_port": "extra7138",
|
||||
"ec2_networkacl_allow_ingress_tcp_port_22": "check45",
|
||||
"ec2_networkacl_allow_ingress_tcp_port_3389": "check46",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_any_port": "extra748",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018": "extra753",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21": "extra7134",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22": "check41",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389": "check42",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888": "extra754",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601": "extra779",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092": "extra7135",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211": "extra755",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306": "extra750",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483": "extra749",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432": "extra751",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379": "extra752",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434": "extra7137",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23": "extra7136",
|
||||
"ec2_securitygroup_allow_wide_open_public_ipv4": "extra778",
|
||||
"ec2_securitygroup_default_restrict_traffic": "check43",
|
||||
"ec2_securitygroup_from_launch_wizard": "extra7173",
|
||||
"ec2_securitygroup_not_used": "extra75",
|
||||
"ec2_securitygroup_with_many_ingress_egress_rules": "extra777",
|
||||
"ecr_repositories_lifecycle_policy_enabled": "extra7194",
|
||||
"ecr_repositories_not_publicly_accessible": "extra77",
|
||||
"ecr_repositories_scan_images_on_push_enabled": "extra765",
|
||||
"ecr_repositories_scan_vulnerabilities_in_latest_image": "extra776",
|
||||
"ecs_task_definitions_no_environment_secrets": "extra768",
|
||||
"efs_encryption_at_rest_enabled": "extra7161",
|
||||
"efs_have_backup_enabled": "extra7148",
|
||||
"efs_not_publicly_accessible": "extra7143",
|
||||
"eks_cluster_kms_cmk_encryption_in_secrets_enabled": "extra797",
|
||||
"eks_control_plane_endpoint_access_restricted": "extra796",
|
||||
"eks_control_plane_logging_all_types_enabled": "extra794",
|
||||
"eks_endpoints_not_publicly_accessible": "extra795",
|
||||
"elb_insecure_ssl_ciphers": "extra792",
|
||||
"elb_internet_facing": "extra79",
|
||||
"elb_logging_enabled": "extra717",
|
||||
"elb_ssl_listeners": "extra793",
|
||||
"elbv2_deletion_protection": "extra7150",
|
||||
"elbv2_desync_mitigation_mode": "extra7155",
|
||||
"elbv2_insecure_ssl_ciphers": "extra792",
|
||||
"elbv2_internet_facing": "extra79",
|
||||
"elbv2_listeners_underneath": "extra7158",
|
||||
"elbv2_logging_enabled": "extra717",
|
||||
"elbv2_request_smugling": "extra7142",
|
||||
"elbv2_ssl_listeners": "extra793",
|
||||
"elbv2_waf_acl_attached": "extra7129",
|
||||
"emr_cluster_account_public_block_enabled": "extra7178",
|
||||
"emr_cluster_master_nodes_no_public_ip": "extra7176",
|
||||
"emr_cluster_publicly_accesible": "extra7177",
|
||||
"glacier_vaults_policy_public_access": "extra7147",
|
||||
"glue_data_catalogs_connection_passwords_encryption_enabled": "extra7117",
|
||||
"glue_data_catalogs_metadata_encryption_enabled": "extra7116",
|
||||
"glue_database_connections_ssl_enabled": "extra7115",
|
||||
"glue_development_endpoints_cloudwatch_logs_encryption_enabled": "extra7119",
|
||||
"glue_development_endpoints_job_bookmark_encryption_enabled": "extra7121",
|
||||
"glue_development_endpoints_s3_encryption_enabled": "extra7114",
|
||||
"glue_etl_jobs_amazon_s3_encryption_enabled": "extra7118",
|
||||
"glue_etl_jobs_cloudwatch_logs_encryption_enabled": "extra7120",
|
||||
"glue_etl_jobs_job_bookmark_encryption_enabled": "extra7122",
|
||||
"guardduty_is_enabled": "extra713",
|
||||
"guardduty_no_high_severity_findings": "extra7139",
|
||||
"iam_administrator_access_with_mfa": "extra71",
|
||||
"iam_avoid_root_usage": "check11",
|
||||
"iam_check_saml_providers_sts": "extra733",
|
||||
"iam_disable_30_days_credentials": "extra774",
|
||||
"iam_disable_45_days_credentials": "extra7198",
|
||||
"iam_disable_90_days_credentials": "check13",
|
||||
"iam_no_custom_policy_permissive_role_assumption": "extra7100",
|
||||
"iam_no_expired_server_certificates_stored": "extra7199",
|
||||
"iam_no_root_access_key": "check112",
|
||||
"iam_password_policy_expires_passwords_within_90_days_or_less": "check111",
|
||||
"iam_password_policy_lowercase": "check16",
|
||||
"iam_password_policy_minimum_length_14": "check19",
|
||||
"iam_password_policy_number": "check18",
|
||||
"iam_password_policy_reuse_24": "check110",
|
||||
"iam_password_policy_symbol": "check17",
|
||||
"iam_password_policy_uppercase": "check15",
|
||||
"iam_policy_allows_privilege_escalation": "extra7185",
|
||||
"iam_policy_attached_only_to_group_or_roles": "check116",
|
||||
"iam_policy_no_administrative_privileges": "check122",
|
||||
"iam_root_hardware_mfa_enabled": "check114",
|
||||
"iam_root_mfa_enabled": "check113",
|
||||
"iam_rotate_access_key_90_days": "check14",
|
||||
"iam_support_role_created": "check120",
|
||||
"iam_user_hardware_mfa_enabled": "extra7125",
|
||||
"iam_user_mfa_enabled_console_access": "check12",
|
||||
"iam_user_no_setup_initial_access_key": "check121",
|
||||
"iam_user_two_active_access_key": "extra7123",
|
||||
"iam_role_cross_service_confused_deputy_prevention": "extra7201",
|
||||
"kms_cmk_are_used": "extra7126",
|
||||
"kms_cmk_rotation_enabled": "check28",
|
||||
"kms_key_not_publicly_accessible": "extra736",
|
||||
"macie_is_enabled": "extra712",
|
||||
"opensearch_service_domains_audit_logging_enabled": "extra7101",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled": "extra715",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled": "extra781",
|
||||
"opensearch_service_domains_https_communications_enforced": "extra783",
|
||||
"opensearch_service_domains_internal_user_database_enabled": "extra784",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled": "extra782",
|
||||
"opensearch_service_domains_not_publicly_accessible": "extra716",
|
||||
"opensearch_service_domains_updated_to_the_latest_service_software_version": "extra785",
|
||||
"opensearch_service_domains_use_cognito_authentication_for_kibana": "extra780",
|
||||
"rds_instance_backup_enabled": "extra739",
|
||||
"rds_instance_deletion_protection": "extra7113",
|
||||
"rds_instance_enhanced_monitoring_enabled": "extra7132",
|
||||
"rds_instance_integration_cloudwatch_logs": "extra747",
|
||||
"rds_instance_minor_version_upgrade_enabled": "extra7131",
|
||||
"rds_instance_multi_az": "extra7133",
|
||||
"rds_instance_no_public_access": "extra78",
|
||||
"rds_instance_storage_encrypted": "extra735",
|
||||
"rds_snapshots_public_access": "extra723",
|
||||
"redshift_cluster_audit_logging": "extra721",
|
||||
"redshift_cluster_automated_snapshot": "extra7149",
|
||||
"redshift_cluster_automatic_upgrades": "extra7160",
|
||||
"redshift_cluster_public_access": "extra711",
|
||||
"route53_domains_privacy_protection_enabled": "extra7152",
|
||||
"route53_domains_transferlock_enabled": "extra7153",
|
||||
"route53_public_hosted_zones_cloudwatch_logging_enabled": "extra719",
|
||||
"s3_account_level_public_access_blocks": "extra7186",
|
||||
"s3_bucket_acl_prohibited": "extra7172",
|
||||
"s3_bucket_default_encryption": "extra734",
|
||||
"s3_bucket_no_mfa_delete": "extra7200",
|
||||
"s3_bucket_object_versioning": "extra763",
|
||||
"s3_bucket_policy_public_write_access": "extra771",
|
||||
"s3_bucket_public_access": "extra73",
|
||||
"s3_bucket_secure_transport_policy": "extra764",
|
||||
"s3_bucket_server_access_logging_enabled": "extra718",
|
||||
"sagemaker_models_network_isolation_enabled": "extra7105",
|
||||
"sagemaker_models_vpc_settings_configured": "extra7106",
|
||||
"sagemaker_notebook_instance_encryption_enabled": "extra7112",
|
||||
"sagemaker_notebook_instance_root_access_disabled": "extra7103",
|
||||
"sagemaker_notebook_instance_vpc_settings_configured": "extra7104",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured": "extra7111",
|
||||
"sagemaker_training_jobs_intercontainer_encryption_enabled": "extra7107",
|
||||
"sagemaker_training_jobs_network_isolation_enabled": "extra7109",
|
||||
"sagemaker_training_jobs_volume_and_output_encryption_enabled": "extra7108",
|
||||
"sagemaker_training_jobs_vpc_settings_configured": "extra7110",
|
||||
"secretsmanager_automatic_rotation_enabled": "extra7163",
|
||||
"securityhub_enabled": "extra799",
|
||||
"shield_advanced_protection_in_associated_elastic_ips": "extra7166",
|
||||
"shield_advanced_protection_in_classic_load_balancers": "extra7171",
|
||||
"shield_advanced_protection_in_cloudfront_distributions": "extra7167",
|
||||
"shield_advanced_protection_in_global_accelerators": "extra7169",
|
||||
"shield_advanced_protection_in_internet_facing_load_balancers": "extra7170",
|
||||
"shield_advanced_protection_in_route53_hosted_zones": "extra7168",
|
||||
"sns_topics_kms_encryption_at_rest_enabled": "extra7130",
|
||||
"sns_topics_not_publicly_accessible": "extra731",
|
||||
"sqs_queues_not_publicly_accessible": "extra727",
|
||||
"sqs_queues_server_side_encryption_enabled": "extra728",
|
||||
"ssm_document_secrets": "extra7141",
|
||||
"ssm_documents_set_as_public": "extra7140",
|
||||
"ssm_managed_compliant_patching": "extra7127",
|
||||
"trustedadvisor_errors_and_warnings": "extra726",
|
||||
"vpc_endpoint_connections_trust_boundaries": "extra789",
|
||||
"vpc_endpoint_services_allowed_principals_trust_boundaries": "extra790",
|
||||
"vpc_flow_logs_enabled": "check29",
|
||||
"vpc_peering_routing_tables_with_least_privilege": "check44",
|
||||
"workspaces_volume_encryption_enabled": "extra7187",
|
||||
}
|
||||
```
|
||||
@@ -1,10 +1,10 @@
|
||||
# Azure subscriptions scope
|
||||
|
||||
By default Prowler is multisubscription, which means that is going to scan all the subscriptions is able to list. If you only assign permissions to one subscription it is going to scan a single one.
|
||||
By default, Prowler is multisubscription, which means that it is going to scan all the subscriptions it is able to list. If you only assign permissions to one subscription, it is going to scan a single one.
|
||||
Prowler also has the ability to limit the subscriptions to scan to a set passed as input argument, to do so:
|
||||
|
||||
```console
|
||||
prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription ID 2> ... <subscription ID N>
|
||||
```
|
||||
|
||||
Where you can pass from 1 up to N subscriptions to be scanned.
|
||||
Where you can pass from 1 up to N subscriptions to be scanned.
|
||||
|
||||
@@ -11,6 +11,24 @@ Currently, the available frameworks are:
|
||||
- `cis_1.4_aws`
|
||||
- `cis_1.5_aws`
|
||||
- `ens_rd2022_aws`
|
||||
- `aws_audit_manager_control_tower_guardrails_aws`
|
||||
- `aws_foundational_security_best_practices_aws`
|
||||
- `cisa_aws`
|
||||
- `fedramp_low_revision_4_aws`
|
||||
- `fedramp_moderate_revision_4_aws`
|
||||
- `ffiec_aws`
|
||||
- `gdpr_aws`
|
||||
- `gxp_eu_annex_11_aws`
|
||||
- `gxp_21_cfr_part_11_aws`
|
||||
- `hipaa_aws`
|
||||
- `nist_800_53_revision_4_aws`
|
||||
- `nist_800_53_revision_5_aws`
|
||||
- `nist_800_171_revision_2_aws`
|
||||
- `nist_csf_1.1_aws`
|
||||
- `pci_3.2.1_aws`
|
||||
- `rbi_cyber_security_framework_aws`
|
||||
- `soc2_aws`
|
||||
|
||||
|
||||
## List Requirements of Compliance Frameworks
|
||||
For each compliance framework, you can use option `--list-compliance-requirements` to list its requirements:
|
||||
@@ -59,40 +77,8 @@ prowler <provider> --compliance <compliance_framework>
|
||||
```
|
||||
Standard results will be shown and additionally the framework information as the sample below for CIS AWS 1.5. For details a CSV file has been generated as well.
|
||||
|
||||
<img src="/img/compliance-cis-sample1.png"/>
|
||||
<img src="../img/compliance-cis-sample1.png"/>
|
||||
|
||||
## Create and contribute adding other Security Frameworks
|
||||
|
||||
If you want to create or contribute your own security frameworks, or add public ones to Prowler, you need to make sure the checks are available; if not, you have to create your own. Then create a compliance file per provider, like in `prowler/compliance/aws/`, name it `<framework>_<version>_<provider>.json`, and follow the format below to create yours.
|
||||
|
||||
Each file version of a framework will have the following structure at a high level (each framework needs to be generally identified; one requirement can also be called one control, but one requirement can be linked to multiple Prowler checks):
|
||||
|
||||
- `Framework`: string. Distinguishing name of the framework, like CIS
|
||||
- `Provider`: string. Provider where the framework applies, such as AWS, Azure, OCI,...
|
||||
- `Version`: string. Version of the framework itself, like 1.4 for CIS.
|
||||
- `Requirements`: array of objects. Include all requirements or controls with the mapping to Prowler.
|
||||
- `Requirements_Id`: string. Unique identifier per each requirement in the specific framework
|
||||
- `Requirements_Description`: string. Description as in the framework.
|
||||
- `Requirements_Attributes`: array of objects. Includes all needed attributes per each requirement, like levels, sections, etc. Whatever helps to create a dedicated report with the result of the findings. Attributes would be taken as closely as possible from the framework's own terminology directly.
|
||||
- `Requirements_Checks`: array. Prowler checks that are needed to prove this requirement. It can be one or multiple checks. In case of no automation possible this can be empty.
|
||||
|
||||
```
|
||||
{
|
||||
"Framework": "<framework>-<provider>",
|
||||
"Version": "<version>",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "<unique-id>",
|
||||
"Description": "Requirement's full description",
|
||||
"Checks": [
|
||||
"Here is the prowler check or checks that is going to be executed"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
<Add here your custom attributes.>
|
||||
}
|
||||
]
|
||||
}
]
}
|
||||
```
|
||||
|
||||
Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`.
|
||||
This information is part of the Developer Guide and can be found here: https://docs.prowler.cloud/en/latest/tutorials/developer-guide/.
|
||||
|
||||
281
docs/tutorials/developer-guide.md
Normal file
@@ -0,0 +1,281 @@
|
||||
# Developer Guide
|
||||
|
||||
You can extend Prowler in many different ways. In most cases, you will want to create your own checks and compliance security frameworks; here is where you can learn how to get started with it. We also include how to create custom outputs, integrations and more.
|
||||
|
||||
## Get the code and install all dependencies
|
||||
|
||||
First of all, you need a version of Python 3.9 or higher and also pip installed to be able to install all dependencies required. Once that is satisfied, go ahead and clone the repo:
|
||||
|
||||
```
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
```
|
||||
For isolation and to avoid conflicts with other environments, we recommend using `poetry`:
|
||||
```
|
||||
pip install poetry
|
||||
```
|
||||
Then install all dependencies including the ones for developers:
|
||||
```
|
||||
poetry install
|
||||
poetry shell
|
||||
```
|
||||
|
||||
## Contributing with your code or fixes to Prowler
|
||||
|
||||
This repo has git pre-commit hooks managed via the pre-commit tool. Install it however you like, then in the root of this repo run:
|
||||
```
|
||||
pre-commit install
|
||||
```
|
||||
You should get an output like the following:
|
||||
```
|
||||
pre-commit installed at .git/hooks/pre-commit
|
||||
```
|
||||
|
||||
Before we merge any of your pull requests, we run checks on the code. We use the following tools and automation to make sure the code is secure and dependencies are up to date (these should have been already installed if you ran `poetry install`):
|
||||
|
||||
- `bandit` for code security review.
|
||||
- `safety` and `dependabot` for dependencies.
|
||||
- `hadolint` and `dockle` for our containers security.
|
||||
- `snyk` in Docker Hub.
|
||||
- `clair` in Amazon ECR.
|
||||
- `vulture`, `flake8`, `black` and `pylint` for formatting and best practices.
|
||||
|
||||
You can see all dependencies in the `pyproject.toml` file.
|
||||
|
||||
## Create a new check for a Provider
|
||||
|
||||
### If the check you want to create belongs to an existing service
|
||||
|
||||
To create a new check, you will need to create a folder inside the specific service, i.e. `prowler/providers/<provider>/services/<service>/<check_name>/`, with the name of check following the pattern: `service_subservice_action`.
|
||||
Inside that folder, create the following files:
|
||||
|
||||
- An empty `__init__.py`: to make Python treat this check folder as a package.
|
||||
- A `check_name.py` containing the check's logic, for example:
|
||||
```
|
||||
# Import the Check_Report of the specific provider
|
||||
from prowler.lib.check.models import Check, Check_Report_AWS
|
||||
# Import the client of the specific service
|
||||
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
|
||||
|
||||
# Create the class for the check
|
||||
class ec2_ebs_volume_encryption(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
# Iterate the service's asset that want to be analyzed
|
||||
for volume in ec2_client.volumes:
|
||||
# Initialize a Check Report for each item and assign the region, resource_id, resource_arn and resource_tags
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = volume.region
|
||||
report.resource_id = volume.id
|
||||
report.resource_arn = volume.arn
|
||||
report.resource_tags = volume.tags
|
||||
# Make the logic with conditions and create a PASS and a FAIL with a status and a status_extended
|
||||
if volume.encrypted:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"EBS volume {volume.id} is encrypted."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"EBS volume {volume.id} is unencrypted."
|
||||
findings.append(report) # Append a report for each item
|
||||
|
||||
return findings
|
||||
```
|
||||
- A `check_name.metadata.json` containing the check's metadata, for example:
|
||||
```
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "ec2_ebs_volume_encryption",
|
||||
"CheckTitle": "Ensure there are no EBS Volumes unencrypted.",
|
||||
"CheckType": [
|
||||
"Data Protection"
|
||||
],
|
||||
"ServiceName": "ec2",
|
||||
"SubServiceName": "volume",
|
||||
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "AwsEc2Volume",
|
||||
"Description": "Ensure there are no EBS Volumes unencrypted.",
|
||||
"Risk": "Data encryption at rest prevents data visibility in the event of its unauthorized access or theft.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Encrypt all EBS volumes and Enable Encryption by default You can configure your AWS account to enforce the encryption of the new EBS volumes and snapshot copies that you create. For example; Amazon EBS encrypts the EBS volumes created when you launch an instance and the snapshots that you copy from an unencrypted snapshot.",
|
||||
"Url": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
```
|
||||
|
||||
### If the check you want to create belongs to a service not supported already by Prowler you will need to create a new service first
|
||||
|
||||
To create a new service, you will need to create a folder inside the specific provider, i.e. `prowler/providers/<provider>/services/<service>/`.
|
||||
Inside that folder, create the following files:
|
||||
|
||||
- An empty `__init__.py`: to make Python treat this service folder as a package.
|
||||
- A `<service>_service.py`, containing all the service's logic and API Calls:
|
||||
```
|
||||
# You must import the following libraries
|
||||
import threading
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
|
||||
from prowler.providers.aws.aws_provider import generate_regional_clients
|
||||
|
||||
|
||||
# Create a class for the Service
|
||||
################## <Service>
|
||||
class <Service>:
|
||||
def __init__(self, audit_info):
|
||||
self.service = "<service>" # The name of the service boto3 client
|
||||
self.session = audit_info.audit_session
|
||||
self.audited_account = audit_info.audited_account
|
||||
self.audit_resources = audit_info.audit_resources
|
||||
self.regional_clients = generate_regional_clients(self.service, audit_info)
|
||||
self.<items> = [] # Create an empty list of the items to be gathered, e.g., instances
|
||||
self.__threading_call__(self.__describe_<items>__)
|
||||
self.__describe_<item>__() # Optionally you can create another function to retrieve more data about each item
|
||||
|
||||
def __get_session__(self):
|
||||
return self.session
|
||||
|
||||
def __threading_call__(self, call):
|
||||
threads = []
|
||||
for regional_client in self.regional_clients.values():
|
||||
threads.append(threading.Thread(target=call, args=(regional_client,)))
|
||||
for t in threads:
|
||||
t.start()
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
def __describe_<items>__(self, regional_client):
|
||||
"""Get ALL <Service> <Items>"""
|
||||
logger.info("<Service> - Describing <Items>...")
|
||||
try:
|
||||
describe_<items>_paginator = regional_client.get_paginator("describe_<items>") # Paginator to get every item
|
||||
for page in describe_<items>_paginator.paginate():
|
||||
for <item> in page["<Items>"]:
|
||||
if not self.audit_resources or (
|
||||
is_resource_filtered(<item>["<item_arn>"], self.audit_resources)
|
||||
):
|
||||
self.<items>.append(
|
||||
<Item>(
|
||||
arn=<item>["<item_arn>"],
|
||||
name=<item>["<item_name>"],
|
||||
tags=<item>.get("Tags", []),
|
||||
region=regional_client.region,
|
||||
)
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __describe_<item>__(self):
|
||||
"""Get Details for a <Service> <Item>"""
|
||||
logger.info("<Service> - Describing <Item> to get specific details...")
|
||||
try:
|
||||
for <item> in self.<items>:
|
||||
<item>_details = self.regional_clients[<item>.region].describe_<item>(
|
||||
<Attribute>=<item>.name
|
||||
)
|
||||
# For example, check if item is Public
|
||||
<item>.public = <item>_details.get("Public", False)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{<item>.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
class <Item>(BaseModel):
|
||||
"""<Item> holds a <Service> <Item>"""
|
||||
|
||||
arn: str
|
||||
"""<Items>[].Arn"""
|
||||
name: str
|
||||
"""<Items>[].Name"""
|
||||
public: bool
|
||||
"""<Items>[].Public"""
|
||||
tags: Optional[list] = []
|
||||
region: str
|
||||
|
||||
```
|
||||
- A `<service>_client.py`, containing the initialization of the service's class we have just created so the service's checks can use it:
|
||||
```
|
||||
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
|
||||
from prowler.providers.aws.services.<service>.<service>_service import <Service>
|
||||
|
||||
<service>_client = <Service>(current_audit_info)
|
||||
```
|
||||
|
||||
## Create a new security compliance framework
|
||||
|
||||
If you want to create or contribute your own security frameworks, or add public ones to Prowler, you need to make sure the required checks are available; if they are not, you have to create your own. Then create a compliance file per provider, like in `prowler/compliance/aws/`, and name it `<framework>_<version>_<provider>.json`; then follow the format below to create yours.
|
||||
|
||||
Each version of a framework file will have the following high-level structure so that each framework can be generally identified. Note that a requirement can also be called a control, and one requirement can be linked to multiple Prowler checks:
|
||||
|
||||
- `Framework`: string. Distinctive name of the framework, like CIS.
|
||||
- `Provider`: string. Provider where the framework applies, such as AWS, Azure, OCI,...
|
||||
- `Version`: string. Version of the framework itself, like 1.4 for CIS.
|
||||
- `Requirements`: array of objects. Include all requirements or controls with the mapping to Prowler.
|
||||
- `Requirements_Id`: string. Unique identifier per each requirement in the specific framework
|
||||
- `Requirements_Description`: string. Description as in the framework.
|
||||
- `Requirements_Attributes`: array of objects. Includes all needed attributes per each requirement, like levels, sections, etc. Whatever helps to create a dedicated report with the result of the findings. Attributes would be taken as closely as possible from the framework's own terminology directly.
|
||||
- `Requirements_Checks`: array. Prowler checks that are needed to prove this requirement. It can be one or multiple checks. If no automation is possible, this can be empty.
|
||||
|
||||
```
|
||||
{
|
||||
"Framework": "<framework>-<provider>",
|
||||
"Version": "<version>",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "<unique-id>",
|
||||
"Description": "Requirement full description",
|
||||
"Checks": [
|
||||
"Here is the prowler check or checks that is going to be executed"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
<Add here your custom attributes.>
|
||||
}
|
||||
]
|
||||
},
|
||||
...
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`.
|
||||
|
||||
|
||||
## Create a custom output format
|
||||
|
||||
## Create a new integration
|
||||
|
||||
## Contribute with documentation
|
||||
|
||||
We use `mkdocs` to build this Prowler documentation site, so you can easily contribute back with new docs or improvements to existing ones.
|
||||
|
||||
1. Install `mkdocs` with your favorite package manager.
|
||||
2. Inside the `prowler` repository folder run `mkdocs serve` and point your browser to `http://localhost:8000` and you will see live changes to your local copy of this documentation site.
|
||||
3. Make all needed changes to docs or add new documents. To do so just edit existing md files inside `prowler/docs` and if you are adding a new section or file please make sure you add it to `mkdocs.yaml` file in the root folder of the Prowler repo.
|
||||
4. Once you are done with changes, please send a pull request to us for review and merge. Thank you in advance!
|
||||
|
||||
## Want some swag as appreciation for your contribution?
|
||||
|
||||
If you are like us and you love swag, we are happy to thank you for your contribution with some laptop stickers or whatever other swag we may have at that time. Please, tell us more details and your pull request link in our [Slack workspace here](https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog). You can also reach out to Toni de la Fuente on Twitter [here](https://twitter.com/ToniBlyx), his DMs are open.
|
||||
|
Before Width: | Height: | Size: 10 KiB After Width: | Height: | Size: 10 KiB |
|
Before Width: | Height: | Size: 51 KiB After Width: | Height: | Size: 51 KiB |
|
Before Width: | Height: | Size: 141 KiB After Width: | Height: | Size: 141 KiB |
@@ -1,16 +1,16 @@
|
||||
# Logging
|
||||
|
||||
Prowler has a logging feature to be as transparent as possible so you can see every action that is going on will the tool is been executing.
|
||||
Prowler has a logging feature to be as transparent as possible, so that you can see every action that is being performed whilst the tool is being executing.
|
||||
|
||||
## Set Log Level
|
||||
## Set Log Level
|
||||
|
||||
There are different log levels depending on the logging information that is desired to be displayed:
|
||||
|
||||
- **DEBUG**: it will show low-level logs of Python.
|
||||
- **INFO**: it will show all the API Calls that are being used in the provider.
|
||||
- **WARNING**: it will show the resources that are being **allowlisted**.
|
||||
- **ERROR**: it will show the errors, e.g., not authorized actions.
|
||||
- **CRITICAL**: default log level, if a critical log appears, it will **exit** Prowler’s execution.
|
||||
- **DEBUG**: It will show low-level logs from Python.
|
||||
- **INFO**: It will show all the API calls that are being invoked by the provider.
|
||||
- **WARNING**: It will show all resources that are being **allowlisted**.
|
||||
- **ERROR**: It will show any errors, e.g., not authorized actions.
|
||||
- **CRITICAL**: The default log level. If a critical log appears, it will **exit** Prowler’s execution.
|
||||
|
||||
You can establish the log level of Prowler with `--log-level` option:
|
||||
|
||||
@@ -20,9 +20,9 @@ prowler <provider> --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}
|
||||
|
||||
> By default, Prowler will run with the `CRITICAL` log level, since critical errors will abort the execution.
|
||||
|
||||
## Export Logs to File
|
||||
## Export Logs to File
|
||||
|
||||
Prowler allows you to export the logs in json format with `--log-file` option:
|
||||
Prowler allows you to export the logs in json format with the `--log-file` option:
|
||||
|
||||
```console
|
||||
prowler <provider> --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL} --log-file <file_name>.json
|
||||
@@ -45,4 +45,4 @@ An example of a log file will be the following:
|
||||
"message": "eu-west-2 -- ClientError[124]: An error occurred (UnauthorizedOperation) when calling the DescribeNetworkAcls operation: You are not authorized to perform this operation."
|
||||
}
|
||||
|
||||
> NOTE: Each finding is a `json` object.
|
||||
> NOTE: Each finding is represented as a `json` object.
|
||||
|
||||
@@ -52,14 +52,15 @@ prowler <provider> -e/--excluded-checks ec2 rds
|
||||
prowler <provider> -C/--checks-file <checks_list>.json
|
||||
```
|
||||
|
||||
## Severities
|
||||
Each check of Prowler has a severity, there are options related with it:
|
||||
## Severities
|
||||
Each of Prowler's checks has a severity, which can be:
|
||||
- informational
|
||||
- low
|
||||
- medium
|
||||
- high
|
||||
- critical
|
||||
|
||||
- List the available severities in the provider:
|
||||
```console
|
||||
prowler <provider> --list-severities
|
||||
```
|
||||
- Execute specific severity(s):
|
||||
To execute specific severity(s):
|
||||
```console
|
||||
prowler <provider> --severity critical high
|
||||
```
|
||||
|
||||
@@ -33,9 +33,8 @@ Several checks analyse resources that are exposed to the Internet, these are:
|
||||
- ec2_instance_internet_facing_with_instance_profile
|
||||
- ec2_instance_public_ip
|
||||
- ec2_networkacl_allow_ingress_any_port
|
||||
- ec2_securitygroup_allow_ingress_from_internet_to_any_port
|
||||
- ec2_securitygroup_allow_wide_open_public_ipv4
|
||||
- ec2_securitygroup_in_use_without_ingress_filtering
|
||||
- ec2_securitygroup_allow_ingress_from_internet_to_any_port
|
||||
- ecr_repositories_not_publicly_accessible
|
||||
- eks_control_plane_endpoint_access_restricted
|
||||
- eks_endpoints_not_publicly_accessible
|
||||
|
||||
@@ -14,4 +14,6 @@ prowler <provider> -i
|
||||
|
||||
- Also, it creates by default a CSV and JSON to see detailed information about the resources extracted.
|
||||
|
||||

|
||||

|
||||
|
||||
> The inventorying process is done with `resourcegroupstaggingapi` calls (except for the IAM resources, which are retrieved with Boto3 API calls).
|
||||
|
||||
@@ -46,9 +46,11 @@ Prowler supports natively the following output formats:
|
||||
|
||||
Hereunder is the structure for each of the supported report formats by Prowler:
|
||||
|
||||
### HTML
|
||||

|
||||
### CSV
|
||||
| ASSESSMENT_START_TIME | FINDING_UNIQUE_ID | PROVIDER | PROFILE | ACCOUNT_ID | ACCOUNT_NAME | ACCOUNT_EMAIL | ACCOUNT_ARN | ACCOUNT_ORG | ACCOUNT_TAGS | REGION | CHECK_ID | CHECK_TITLE | CHECK_TYPE | STATUS | STATUS_EXTENDED | SERVICE_NAME | SUBSERVICE_NAME | SEVERITY | RESOURCE_ID | RESOURCE_ARN | RESOURCE_TYPE | RESOURCE_DETAILS | RESOURCE_TAGS | DESCRIPTION | RISK | RELATED_URL | REMEDIATION_RECOMMENDATION_TEXT | REMEDIATION_RECOMMENDATION_URL | REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC | REMEDIATION_RECOMMENDATION_CODE_TERRAFORM | REMEDIATION_RECOMMENDATION_CODE_CLI | REMEDIATION_RECOMMENDATION_CODE_OTHER | CATEGORIES | DEPENDS_ON | RELATED_TO | NOTES |
|
||||
| ------- | ----------- | ------ | -------- | ------------ | ----------- | ---------- | ---------- | --------------------- | -------------------------- | -------------- | ----------------- | ------------------------ | --------------- | ---------- | ----------------- | --------- | -------------- | ----------------- | ------------------ | --------------------- | -------------------- | ------------------- | ------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- |
|
||||
| ASSESSMENT_START_TIME | FINDING_UNIQUE_ID | PROVIDER | PROFILE | ACCOUNT_ID | ACCOUNT_NAME | ACCOUNT_EMAIL | ACCOUNT_ARN | ACCOUNT_ORG | ACCOUNT_TAGS | REGION | CHECK_ID | CHECK_TITLE | CHECK_TYPE | STATUS | STATUS_EXTENDED | SERVICE_NAME | SUBSERVICE_NAME | SEVERITY | RESOURCE_ID | RESOURCE_ARN | RESOURCE_TYPE | RESOURCE_DETAILS | RESOURCE_TAGS | DESCRIPTION | COMPLIANCE | RISK | RELATED_URL | REMEDIATION_RECOMMENDATION_TEXT | REMEDIATION_RECOMMENDATION_URL | REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC | REMEDIATION_RECOMMENDATION_CODE_TERRAFORM | REMEDIATION_RECOMMENDATION_CODE_CLI | REMEDIATION_RECOMMENDATION_CODE_OTHER | CATEGORIES | DEPENDS_ON | RELATED_TO | NOTES |
|
||||
| ------- | ----------- | ------ | -------- | ------------ | ----------- | ---------- | ---------- | --------------------- | -------------------------- | -------------- | ----------------- | ------------------------ | --------------- | ---------- | ----------------- | --------- | -------------- | ----------------- | ------------------ | --------------------- | -------------------- | ------------------- | ------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- |
|
||||
|
||||
### JSON
|
||||
|
||||
@@ -71,6 +73,10 @@ Hereunder is the structure for each of the supported report formats by Prowler:
|
||||
"Severity": "low",
|
||||
"ResourceId": "rds-instance-id",
|
||||
"ResourceArn": "",
|
||||
"ResourceTags": {
|
||||
"test": "test",
|
||||
"environment": "dev"
|
||||
},
|
||||
"ResourceType": "AwsRdsDbInstance",
|
||||
"ResourceDetails": "",
|
||||
"Description": "Ensure RDS instances have minor version upgrade enabled.",
|
||||
@@ -89,7 +95,15 @@ Hereunder is the structure for each of the supported report formats by Prowler:
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"Notes": ""
|
||||
"Notes": "",
|
||||
"Compliance": {
|
||||
"CIS-1.4": [
|
||||
"1.20"
|
||||
],
|
||||
"CIS-1.5": [
|
||||
"1.20"
|
||||
]
|
||||
}
|
||||
},{
|
||||
"AssessmentStartTime": "2022-12-01T14:16:57.354413",
|
||||
"FindingUniqueId": "",
|
||||
@@ -109,7 +123,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
|
||||
"ResourceId": "rds-instance-id",
|
||||
"ResourceArn": "",
|
||||
"ResourceType": "AwsRdsDbInstance",
|
||||
"ResourceDetails": "",
|
||||
"ResourceTags": {},
|
||||
"Description": "Ensure RDS instances have minor version upgrade enabled.",
|
||||
"Risk": "Auto Minor Version Upgrade is a feature that you can enable to have your database automatically upgraded when a new minor database engine version is available. Minor version upgrades often patch security vulnerabilities and fix bugs and therefore should be applied.",
|
||||
"RelatedUrl": "https://aws.amazon.com/blogs/database/best-practices-for-upgrading-amazon-rds-to-major-and-minor-versions-of-postgresql/",
|
||||
@@ -126,7 +140,8 @@ Hereunder is the structure for each of the supported report formats by Prowler:
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"Notes": ""
|
||||
"Notes": "",
|
||||
"Compliance": {}
|
||||
}]
|
||||
```
|
||||
|
||||
@@ -166,7 +181,30 @@ Hereunder is the structure for each of the supported report formats by Prowler:
|
||||
],
|
||||
"Compliance": {
|
||||
"Status": "PASSED",
|
||||
"RelatedRequirements": []
|
||||
"RelatedRequirements": [
|
||||
"CISA your-systems-2 booting-up-thing-to-do-first-3",
|
||||
"CIS-1.5 2.3.2",
|
||||
"AWS-Foundational-Security-Best-Practices rds",
|
||||
"RBI-Cyber-Security-Framework annex_i_6",
|
||||
"FFIEC d3-cc-pm-b-1 d3-cc-pm-b-3"
|
||||
],
|
||||
"AssociatedStandards": [
|
||||
{
|
||||
"StandardsId": "CISA"
|
||||
},
|
||||
{
|
||||
"StandardsId": "CIS-1.5"
|
||||
},
|
||||
{
|
||||
"StandardsId": "AWS-Foundational-Security-Best-Practices"
|
||||
},
|
||||
{
|
||||
"StandardsId": "RBI-Cyber-Security-Framework"
|
||||
},
|
||||
{
|
||||
"StandardsId": "FFIEC"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Remediation": {
|
||||
"Recommendation": {
|
||||
@@ -205,7 +243,30 @@ Hereunder is the structure for each of the supported report formats by Prowler:
|
||||
],
|
||||
"Compliance": {
|
||||
"Status": "PASSED",
|
||||
"RelatedRequirements": []
|
||||
"RelatedRequirements": [
|
||||
"CISA your-systems-2 booting-up-thing-to-do-first-3",
|
||||
"CIS-1.5 2.3.2",
|
||||
"AWS-Foundational-Security-Best-Practices rds",
|
||||
"RBI-Cyber-Security-Framework annex_i_6",
|
||||
"FFIEC d3-cc-pm-b-1 d3-cc-pm-b-3"
|
||||
],
|
||||
"AssociatedStandards": [
|
||||
{
|
||||
"StandardsId": "CISA"
|
||||
},
|
||||
{
|
||||
"StandardsId": "CIS-1.5"
|
||||
},
|
||||
{
|
||||
"StandardsId": "AWS-Foundational-Security-Best-Practices"
|
||||
},
|
||||
{
|
||||
"StandardsId": "RBI-Cyber-Security-Framework"
|
||||
},
|
||||
{
|
||||
"StandardsId": "FFIEC"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Remediation": {
|
||||
"Recommendation": {
|
||||
|
||||
47
mkdocs.yml
@@ -25,31 +25,38 @@ repo_url: https://github.com/prowler-cloud/prowler/
|
||||
repo_name: prowler-cloud/prowler
|
||||
|
||||
nav:
|
||||
- Getting Started:
|
||||
- Overview: index.md
|
||||
- Requirements: getting-started/requirements.md
|
||||
- Tutorials:
|
||||
- Miscellaneous: tutorials/misc.md
|
||||
- Reporting: tutorials/reporting.md
|
||||
- Compliance: tutorials/compliance.md
|
||||
- Quick Inventory: tutorials/quick-inventory.md
|
||||
- Configuration File: tutorials/configuration_file.md
|
||||
- Logging: tutorials/logging.md
|
||||
- Allowlist: tutorials/allowlist.md
|
||||
- Pentesting: tutorials/pentesting.md
|
||||
- AWS:
|
||||
- Getting Started:
|
||||
- Overview: index.md
|
||||
- Requirements: getting-started/requirements.md
|
||||
- Tutorials:
|
||||
- Miscellaneous: tutorials/misc.md
|
||||
- Reporting: tutorials/reporting.md
|
||||
- Compliance: tutorials/compliance.md
|
||||
- Quick Inventory: tutorials/quick-inventory.md
|
||||
- Configuration File: tutorials/configuration_file.md
|
||||
- Logging: tutorials/logging.md
|
||||
- Allowlist: tutorials/allowlist.md
|
||||
- Pentesting: tutorials/pentesting.md
|
||||
- Developer Guide: tutorials/developer-guide.md
|
||||
- AWS:
|
||||
- Assume Role: tutorials/aws/role-assumption.md
|
||||
- AWS Security Hub: tutorials/aws/securityhub.md
|
||||
- AWS Organizations: tutorials/aws/organizations.md
|
||||
- Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
|
||||
- AWS CloudShell: tutorials/aws/cloudshell.md
|
||||
- Azure:
|
||||
- Checks v2 to v3 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
|
||||
- Tag-based Scan: tutorials/aws/tag-based-scan.md
|
||||
- Resource ARNs based Scan: tutorials/aws/resource-arn-based-scan.md
|
||||
- Boto3 Configuration: tutorials/aws/boto3-configuration.md
|
||||
- Azure:
|
||||
- Authentication: tutorials/azure/authentication.md
|
||||
- Subscriptions: tutorials/azure/subscriptions.md
|
||||
- Contact Us: contact.md
|
||||
- Troubleshooting: troubleshooting.md
|
||||
- About: about.md
|
||||
- ProwlerPro: https://prowler.pro
|
||||
- Developer Guide: tutorials/developer-guide.md
|
||||
- Security: security.md
|
||||
- Contact Us: contact.md
|
||||
- Troubleshooting: troubleshooting.md
|
||||
- About: about.md
|
||||
- ProwlerPro: https://prowler.pro
|
||||
# Customization
|
||||
extra:
|
||||
consent:
|
||||
@@ -73,7 +80,7 @@ extra:
|
||||
link: https://twitter.com/prowlercloud
|
||||
|
||||
# Copyright
|
||||
copyright: Copyright © 2022 Toni de la Fuente, Maintained by the Prowler Team at Verica, Inc.</a>.
|
||||
copyright: Copyright © 2022 Toni de la Fuente, Maintained by the Prowler Team at Verica, Inc.</a>
|
||||
|
||||
markdown_extensions:
|
||||
- abbr
|
||||
@@ -112,4 +119,4 @@ markdown_extensions:
|
||||
alternate_style: true
|
||||
- pymdownx.tasklist:
|
||||
custom_checkbox: true
|
||||
- pymdownx.tilde
|
||||
- pymdownx.tilde
|
||||
|
||||
@@ -4,7 +4,7 @@ AWSTemplateFormatVersion: '2010-09-09'
|
||||
# aws cloudformation create-stack \
|
||||
# --capabilities CAPABILITY_IAM --capabilities CAPABILITY_NAMED_IAM \
|
||||
# --template-body "file://create_role_to_assume_cfn.yaml" \
|
||||
# --stack-name "ProwlerExecRole" \
|
||||
# --stack-name "ProwlerScanRole" \
|
||||
# --parameters "ParameterKey=AuthorisedARN,ParameterValue=arn:aws:iam::123456789012:root"
|
||||
#
|
||||
Description: |
|
||||
@@ -13,7 +13,7 @@ Description: |
|
||||
account to assume that role. The role name and the ARN of the trusted user can all be passed
|
||||
to the CloudFormation stack as parameters. Then you can run Prowler to perform a security
|
||||
assessment with a command like:
|
||||
./prowler -A <THIS_ACCOUNT_ID> -R ProwlerExecRole
|
||||
prowler --role ProwlerScanRole.ARN
|
||||
Parameters:
|
||||
AuthorisedARN:
|
||||
Description: |
|
||||
@@ -22,12 +22,12 @@ Parameters:
|
||||
Type: String
|
||||
ProwlerRoleName:
|
||||
Description: |
|
||||
Name of the IAM role that will have these policies attached. Default: ProwlerExecRole
|
||||
Name of the IAM role that will have these policies attached. Default: ProwlerScanRole
|
||||
Type: String
|
||||
Default: 'ProwlerExecRole'
|
||||
Default: 'ProwlerScanRole'
|
||||
|
||||
Resources:
|
||||
ProwlerExecRole:
|
||||
ProwlerScanRole:
|
||||
Type: AWS::IAM::Role
|
||||
Properties:
|
||||
AssumeRolePolicyDocument:
|
||||
@@ -42,31 +42,49 @@ Resources:
|
||||
# Bool:
|
||||
# 'aws:MultiFactorAuthPresent': true
|
||||
# This is 12h that is maximum allowed, Minimum is 3600 = 1h
|
||||
# to take advantage of this use -T like in './prowler -A <ACCOUNT_ID_TO_ASSUME> -R ProwlerExecRole -T 43200 -M text,html'
|
||||
# to take advantage of this use -T like in './prowler --role ProwlerScanRole.ARN -T 43200'
|
||||
MaxSessionDuration: 43200
|
||||
ManagedPolicyArns:
|
||||
- 'arn:aws:iam::aws:policy/SecurityAudit'
|
||||
- 'arn:aws:iam::aws:policy/job-function/ViewOnlyAccess'
|
||||
RoleName: !Sub ${ProwlerRoleName}
|
||||
Policies:
|
||||
- PolicyName: ProwlerExecRoleAdditionalViewPrivileges
|
||||
- PolicyName: ProwlerScanRoleAdditionalViewPrivileges
|
||||
PolicyDocument:
|
||||
Version : '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'ds:ListAuthorizedApplications'
|
||||
- 'account:Get*'
|
||||
- 'appstream:Describe*'
|
||||
- 'appstream:List*'
|
||||
- 'codeartifact:List*'
|
||||
- 'codebuild:BatchGet*'
|
||||
- 'ds:Get*'
|
||||
- 'ds:Describe*'
|
||||
- 'ds:List*'
|
||||
- 'ec2:GetEbsEncryptionByDefault'
|
||||
- 'ecr:Describe*'
|
||||
- 'elasticfilesystem:DescribeBackupPolicy'
|
||||
- 'glue:GetConnections'
|
||||
- 'glue:GetSecurityConfiguration'
|
||||
- 'glue:GetSecurityConfiguration*'
|
||||
- 'glue:SearchTables'
|
||||
- 'lambda:GetFunction'
|
||||
- 'lambda:GetFunction*'
|
||||
- 'macie2:GetMacieSession'
|
||||
- 's3:GetAccountPublicAccessBlock'
|
||||
- 'shield:DescribeProtection'
|
||||
- 'shield:GetSubscriptionState'
|
||||
- 'securityhub:BatchImportFindings'
|
||||
- 'securityhub:GetFindings'
|
||||
- 'ssm:GetDocument'
|
||||
- 'support:Describe*'
|
||||
- 'tag:GetTagKeys'
|
||||
Resource: '*'
|
||||
- PolicyName: ProwlerScanRoleAdditionalViewPrivilegesApiGateway
|
||||
PolicyDocument:
|
||||
Version : '2012-10-17'
|
||||
Statement:
|
||||
- Effect: Allow
|
||||
Action:
|
||||
- 'apigateway:GET'
|
||||
Resource: 'arn:aws:apigateway:*::/restapis/*'
|
||||
|
||||
@@ -3,14 +3,22 @@
|
||||
"Statement": [
|
||||
{
|
||||
"Action": [
|
||||
"ds:ListAuthorizedApplications",
|
||||
"account:Get*",
|
||||
"appstream:Describe*",
|
||||
"appstream:List*",
|
||||
"codeartifact:List*",
|
||||
"codebuild:BatchGet*",
|
||||
"ds:Describe*",
|
||||
"ds:Get*",
|
||||
"ds:List*",
|
||||
"ec2:GetEbsEncryptionByDefault",
|
||||
"ecr:Describe*",
|
||||
"elasticfilesystem:DescribeBackupPolicy",
|
||||
"glue:GetConnections",
|
||||
"glue:GetSecurityConfiguration",
|
||||
"glue:GetSecurityConfiguration*",
|
||||
"glue:SearchTables",
|
||||
"lambda:GetFunction",
|
||||
"lambda:GetFunction*",
|
||||
"macie2:GetMacieSession",
|
||||
"s3:GetAccountPublicAccessBlock",
|
||||
"shield:DescribeProtection",
|
||||
"shield:GetSubscriptionState",
|
||||
@@ -21,6 +29,15 @@
|
||||
"Resource": "*",
|
||||
"Effect": "Allow",
|
||||
"Sid": "AllowMoreReadForProwler"
|
||||
},
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"apigateway:GET"
|
||||
],
|
||||
"Resource": [
|
||||
"arn:aws:apigateway:*::/restapis/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
2847
poetry.lock
generated
Normal file
@@ -22,21 +22,21 @@ from prowler.lib.check.checks_loader import load_checks_to_execute
|
||||
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
|
||||
from prowler.lib.cli.parser import ProwlerArgumentParser
|
||||
from prowler.lib.logger import logger, set_logging_config
|
||||
from prowler.lib.outputs.outputs import (
|
||||
extract_findings_statistics,
|
||||
send_to_s3_bucket,
|
||||
)
|
||||
from prowler.lib.outputs.compliance import display_compliance_table
|
||||
from prowler.lib.outputs.html import add_html_footer, fill_html_overview_statistics
|
||||
from prowler.lib.outputs.json import close_json
|
||||
from prowler.lib.outputs.outputs import extract_findings_statistics, send_to_s3_bucket
|
||||
from prowler.lib.outputs.summary_table import display_summary_table
|
||||
from prowler.providers.aws.lib.allowlist.allowlist import parse_allowlist_file
|
||||
from prowler.providers.aws.lib.quick_inventory.quick_inventory import quick_inventory
|
||||
from prowler.providers.aws.lib.security_hub.security_hub import (
|
||||
resolve_security_hub_previous_findings,
|
||||
)
|
||||
from prowler.providers.common.audit_info import set_provider_audit_info
|
||||
from prowler.providers.common.allowlist import set_provider_allowlist
|
||||
from prowler.providers.common.audit_info import (
|
||||
set_provider_audit_info,
|
||||
set_provider_execution_parameters,
|
||||
)
|
||||
from prowler.providers.common.outputs import set_provider_output_options
|
||||
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
|
||||
|
||||
|
||||
def prowler():
|
||||
@@ -60,9 +60,9 @@ def prowler():
|
||||
args.output_modes.extend(compliance_framework)
|
||||
|
||||
# Set Logger configuration
|
||||
set_logging_config(args.log_file, args.log_level)
|
||||
set_logging_config(args.log_level, args.log_file, args.only_logs)
|
||||
|
||||
if args.no_banner:
|
||||
if not args.no_banner:
|
||||
print_banner(args)
|
||||
|
||||
if args.list_services:
|
||||
@@ -81,26 +81,19 @@ def prowler():
|
||||
# Load compliance frameworks
|
||||
logger.debug("Loading compliance frameworks from .json files")
|
||||
|
||||
# Load the compliance framework if specified with --compliance
|
||||
# If some compliance argument is specified we have to load it
|
||||
if (
|
||||
args.list_compliance
|
||||
or args.list_compliance_requirements
|
||||
or compliance_framework
|
||||
):
|
||||
bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
|
||||
# Complete checks metadata with the compliance framework specification
|
||||
update_checks_metadata_with_compliance(
|
||||
bulk_compliance_frameworks, bulk_checks_metadata
|
||||
bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
|
||||
# Complete checks metadata with the compliance framework specification
|
||||
update_checks_metadata_with_compliance(
|
||||
bulk_compliance_frameworks, bulk_checks_metadata
|
||||
)
|
||||
if args.list_compliance:
|
||||
print_compliance_frameworks(bulk_compliance_frameworks)
|
||||
sys.exit()
|
||||
if args.list_compliance_requirements:
|
||||
print_compliance_requirements(
|
||||
bulk_compliance_frameworks, args.list_compliance_requirements
|
||||
)
|
||||
if args.list_compliance:
|
||||
print_compliance_frameworks(bulk_compliance_frameworks)
|
||||
sys.exit()
|
||||
if args.list_compliance_requirements:
|
||||
print_compliance_requirements(
|
||||
bulk_compliance_frameworks, args.list_compliance_requirements
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit()
|
||||
|
||||
# Load checks to execute
|
||||
checks_to_execute = load_checks_to_execute(
|
||||
@@ -119,7 +112,7 @@ def prowler():
|
||||
if excluded_checks:
|
||||
checks_to_execute = exclude_checks_to_run(checks_to_execute, excluded_checks)
|
||||
|
||||
# Exclude services if -s/--excluded-services
|
||||
# Exclude services if --excluded-services
|
||||
if excluded_services:
|
||||
checks_to_execute = exclude_services_to_run(
|
||||
checks_to_execute, excluded_services, provider
|
||||
@@ -136,20 +129,22 @@ def prowler():
|
||||
# Set the audit info based on the selected provider
|
||||
audit_info = set_provider_audit_info(provider, args.__dict__)
|
||||
|
||||
# Parse content from Allowlist file and get it, if necessary, from S3
|
||||
if provider == "aws" and args.allowlist_file:
|
||||
allowlist_file = parse_allowlist_file(audit_info, args.allowlist_file)
|
||||
else:
|
||||
allowlist_file = None
|
||||
# Once the audit_info is set and we have the eventual checks based on the resource identifier,
|
||||
# it is time to check what Prowler's checks are going to be executed
|
||||
if audit_info.audit_resources:
|
||||
checks_to_execute = set_provider_execution_parameters(provider, audit_info)
|
||||
|
||||
# Setting output options based on the selected provider
|
||||
# Parse Allowlist
|
||||
allowlist_file = set_provider_allowlist(provider, audit_info, args)
|
||||
|
||||
# Set output options based on the selected provider
|
||||
audit_output_options = set_provider_output_options(
|
||||
provider, args, audit_info, allowlist_file, bulk_checks_metadata
|
||||
)
|
||||
|
||||
# Quick Inventory for AWS
|
||||
if provider == "aws" and args.quick_inventory:
|
||||
quick_inventory(audit_info, args.output_directory)
|
||||
# Run the quick inventory for the provider if available
|
||||
if hasattr(args, "quick_inventory") and args.quick_inventory:
|
||||
run_provider_quick_inventory(provider, audit_info, args.output_directory)
|
||||
sys.exit()
|
||||
|
||||
# Execute checks
|
||||
@@ -199,27 +194,33 @@ def prowler():
|
||||
)
|
||||
|
||||
# Resolve previous fails of Security Hub
|
||||
if provider == "aws" and args.security_hub:
|
||||
if provider == "aws" and args.security_hub and not args.skip_sh_update:
|
||||
resolve_security_hub_previous_findings(args.output_directory, audit_info)
|
||||
|
||||
# Display summary table
|
||||
display_summary_table(
|
||||
findings,
|
||||
audit_info,
|
||||
audit_output_options,
|
||||
provider,
|
||||
)
|
||||
|
||||
if compliance_framework and findings:
|
||||
# Display compliance table
|
||||
display_compliance_table(
|
||||
if not args.only_logs:
|
||||
display_summary_table(
|
||||
findings,
|
||||
bulk_checks_metadata,
|
||||
compliance_framework,
|
||||
audit_output_options.output_filename,
|
||||
audit_output_options.output_directory,
|
||||
audit_info,
|
||||
audit_output_options,
|
||||
provider,
|
||||
)
|
||||
|
||||
if compliance_framework and findings:
|
||||
for compliance in compliance_framework:
|
||||
# Display compliance table
|
||||
display_compliance_table(
|
||||
findings,
|
||||
bulk_checks_metadata,
|
||||
compliance,
|
||||
audit_output_options.output_filename,
|
||||
audit_output_options.output_directory,
|
||||
)
|
||||
|
||||
# If there are failed findings exit code 3, except if -z is input
|
||||
if not args.ignore_exit_code_3 and stats["total_fail"] > 0:
|
||||
sys.exit(3)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
prowler()
|
||||
|
||||
@@ -0,0 +1,214 @@
|
||||
{
|
||||
"Framework": "AWS-Audit-Manager-Control-Tower-Guardrails",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "AWS Control Tower is a management and governance service that you can use to navigate through the setup process and governance requirements that are involved in creating a multi-account AWS environment.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "1.0.1",
|
||||
"Name": "Disallow launch of EC2 instance types that are not EBS-optimized",
|
||||
"Description": "Checks whether EBS optimization is enabled for your EC2 instances that can be EBS-optimized",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "1.0.1",
|
||||
"Section": "EBS checks",
|
||||
"Service": "ebs"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "1.0.2",
|
||||
"Name": "Disallow EBS volumes that are unattached to an EC2 instance",
|
||||
"Description": "Checks whether EBS volumes are attached to EC2 instances",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "1.0.2",
|
||||
"Section": "EBS checks",
|
||||
"Service": "ebs"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "1.0.3",
|
||||
"Name": "Enable encryption for EBS volumes attached to EC2 instances",
|
||||
"Description": "Checks whether EBS volumes that are in an attached state are encrypted",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "1.0.3",
|
||||
"Section": "EBS checks",
|
||||
"Service": "ebs"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_default_encryption"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "2.0.1",
|
||||
"Name": "Disallow internet connection through RDP",
|
||||
"Description": "Checks whether security groups that are in use disallow unrestricted incoming TCP traffic to the specified",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "2.0.1",
|
||||
"Section": "Disallow Internet Connection",
|
||||
"Service": "vpc"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "2.0.2",
|
||||
"Name": "Disallow internet connection through SSH",
|
||||
"Description": "Checks whether security groups that are in use disallow unrestricted incoming SSH traffic.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "2.0.2",
|
||||
"Section": "Disallow Internet Connection",
|
||||
"Service": "vpc"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "3.0.1",
|
||||
"Name": "Disallow access to IAM users without MFA",
|
||||
"Description": "Checks whether the AWS Identity and Access Management users have multi-factor authentication (MFA) enabled.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "3.0.1",
|
||||
"Section": "Multi-Factor Authentication",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_user_mfa_enabled_console_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "3.0.2",
|
||||
"Name": "Disallow console access to IAM users without MFA",
|
||||
"Description": "Checks whether AWS Multi-Factor Authentication (MFA) is enabled for all AWS Identity and Access Management (IAM) users that use a console password.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "3.0.2",
|
||||
"Section": "Multi-Factor Authentication",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_user_mfa_enabled_console_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "3.0.3",
|
||||
"Name": "Enable MFA for the root user",
|
||||
"Description": "Checks whether the root user of your AWS account requires multi-factor authentication for console sign-in.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "3.0.3",
|
||||
"Section": "Multi-Factor Authentication",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_root_mfa_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.0.1",
|
||||
"Name": "Disallow public access to RDS database instances",
|
||||
"Description": "Checks whether the Amazon Relational Database Service (RDS) instances are not publicly accessible. The rule is non-compliant if the publiclyAccessible field is true in the instance configuration item.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.0.1",
|
||||
"Section": "Disallow Public Access",
|
||||
"Service": "rds"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_no_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.0.2",
|
||||
"Name": "Disallow public access to RDS database snapshots",
|
||||
"Description": "Checks if Amazon Relational Database Service (Amazon RDS) snapshots are public. The rule is non-compliant if any existing and new Amazon RDS snapshots are public.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.0.2",
|
||||
"Section": "Disallow Public Access",
|
||||
"Service": "rds"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_snapshots_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.1.1",
|
||||
"Name": "Disallow public read access to S3 buckets",
|
||||
"Description": "Checks that your S3 buckets do not allow public read access.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.1.1",
|
||||
"Section": "Disallow Public Access",
|
||||
"Service": "s3"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_no_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.1.2",
|
||||
"Name": "Disallow public write access to S3 buckets",
|
||||
"Description": "Checks that your S3 buckets do not allow public write access.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.1.2",
|
||||
"Section": "Disallow Public Access",
|
||||
"Service": "s3"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_policy_public_write_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "5.0.1",
|
||||
"Name": "Disallow RDS database instances that are not storage encrypted ",
|
||||
"Description": "Checks whether storage encryption is enabled for your RDS DB instances.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "5.0.1",
|
||||
"Section": "Disallow Instances",
|
||||
"Service": "rds"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_storage_encrypted"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "5.1.1",
|
||||
"Name": "Disallow S3 buckets that are not versioning enabled",
|
||||
"Description": "Checks whether versioning is enabled for your S3 buckets.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "5.1.1",
|
||||
"Section": "Disallow Instances",
|
||||
"Service": "s3"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,604 @@
|
||||
{
|
||||
"Framework": "AWS-Foundational-Security-Best-Practices",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "The AWS Foundational Security Best Practices standard is a set of controls that detect when your deployed accounts and resources deviate from security best practices.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "account",
|
||||
"Name": "Account",
|
||||
"Description": "This section contains recommendations for configuring AWS Account.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "account",
|
||||
"Section": "Account",
|
||||
"Service": "account"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "acm",
|
||||
"Name": "ACM",
|
||||
"Description": "This section contains recommendations for configuring ACM resources.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "acm",
|
||||
"Section": "Acm",
|
||||
"Service": "acm"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "api-gateway",
|
||||
"Name": "API Gateway",
|
||||
"Description": "This section contains recommendations for configuring API Gateway resources.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "api-gateway",
|
||||
"Section": "API Gateway",
|
||||
"Service": "apigateway"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"apigateway_client_certificate_enabled",
|
||||
"apigateway_waf_acl_attached",
|
||||
"apigatewayv2_authorizers_enabled",
|
||||
"apigatewayv2_access_logging_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "auto-scaling",
|
||||
"Name": "Benchmark: Auto Scaling",
|
||||
"Description": "This section contains recommendations for configuring Auto Scaling resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "auto-scaling",
|
||||
"Section": "Auto Scaling",
|
||||
"Service": "autoscaling"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cloudformation",
|
||||
"Name": "Benchmark: CloudFormation",
|
||||
"Description": "This section contains recommendations for configuring CloudFormation resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cloudformation",
|
||||
"Section": "CloudFormation",
|
||||
"Service": "cloudformation"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cloudfront",
|
||||
"Name": "Benchmark: CloudFront",
|
||||
"Description": "This section contains recommendations for configuring CloudFront resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cloudfront",
|
||||
"Section": "CloudFront",
|
||||
"Service": "cloudfront"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudfront_distributions_https_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudfront_distributions_using_waf",
|
||||
"cloudfront_distributions_field_level_encryption_enabled",
|
||||
"cloudfront_distributions_using_deprecated_ssl_protocols"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cloudtrail",
|
||||
"Name": "Benchmark: CloudTrail",
|
||||
"Description": "This section contains recommendations for configuring CloudTrail resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cloudtrail",
|
||||
"Section": "CloudTrail",
|
||||
"Service": "cloudtrail"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "codebuild",
|
||||
"Name": "Benchmark: CodeBuild",
|
||||
"Description": "This section contains recommendations for configuring CodeBuild resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "codebuild",
|
||||
"Section": "CodeBuild",
|
||||
"Service": "codebuild"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "config",
|
||||
"Name": "Benchmark: Config",
|
||||
"Description": "This section contains recommendations for configuring AWS Config.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "config",
|
||||
"Section": "Config",
|
||||
"Service": "config"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "dms",
|
||||
"Name": "Benchmark: DMS",
|
||||
"Description": "This section contains recommendations for configuring AWS DMS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "dms",
|
||||
"Section": "DMS",
|
||||
"Service": "dms"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "dynamodb",
|
||||
"Name": "Benchmark: DynamoDB",
|
||||
"Description": "This section contains recommendations for configuring AWS Dynamo DB resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "dynamodb",
|
||||
"Section": "DynamoDB",
|
||||
"Service": "dynamodb"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ec2",
|
||||
"Name": "Benchmark: EC2",
|
||||
"Description": "This section contains recommendations for configuring EC2 resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ec2",
|
||||
"Section": "EC2",
|
||||
"Service": "ec2"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_instance_older_than_specific_days",
|
||||
"vpc_flow_logs_enabled",
|
||||
"ec2_ebs_default_encryption",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"ec2_instance_public_ip",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_not_used"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ecr",
|
||||
"Name": "Benchmark: Elastic Container Registry",
|
||||
"Description": "This section contains recommendations for configuring AWS ECR resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ecr",
|
||||
"Section": "ECR",
|
||||
"Service": "ecr"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ecr_repositories_scan_images_on_push_enabled",
|
||||
"ecr_repositories_lifecycle_policy_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ecs",
|
||||
"Name": "Benchmark: Elastic Container Service",
|
||||
"Description": "This section contains recommendations for configuring ECS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ecs",
|
||||
"Section": "ECS",
|
||||
"Service": "ecs"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ecs_task_definitions_no_environment_secrets"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "efs",
|
||||
"Name": "Benchmark: EFS",
|
||||
"Description": "This section contains recommendations for configuring AWS EFS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "efs",
|
||||
"Section": "EFS",
|
||||
"Service": "efs"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"efs_have_backup_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "eks",
|
||||
"Name": "Benchmark: EKS",
|
||||
"Description": "This section contains recommendations for configuring AWS EKS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "eks",
|
||||
"Section": "EKS",
|
||||
"Service": "eks"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "elastic-beanstalk",
|
||||
"Name": "Benchmark: Elastic Beanstalk",
|
||||
"Description": "This section contains recommendations for configuring AWS Elastic Beanstalk resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "elastic-beanstalk",
|
||||
"Section": "Elastic Beanstalk",
|
||||
"Service": "elasticbeanstalk"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "elb",
|
||||
"Name": "Benchmark: ELB",
|
||||
"Description": "This section contains recommendations for configuring Elastic Load Balancer resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "elb",
|
||||
"Section": "ELB",
|
||||
"Service": "elb"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"elbv2_deletion_protection",
|
||||
"elbv2_desync_mitigation_mode"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "elbv2",
|
||||
"Name": "Benchmark: ELBv2",
|
||||
"Description": "This section contains recommendations for configuring Elastic Load Balancer resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "elbv2",
|
||||
"Section": "ELBv2",
|
||||
"Service": "elbv2"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "emr",
|
||||
"Name": "Benchmark: EMR",
|
||||
"Description": "This section contains recommendations for configuring EMR resources.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "emr",
|
||||
"Section": "EMR",
|
||||
"Service": "emr"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"emr_cluster_master_nodes_no_public_ip"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "elasticsearch",
|
||||
"Name": "Benchmark: Elasticsearch",
|
||||
"Description": "This section contains recommendations for configuring Elasticsearch resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "elasticsearch",
|
||||
"Section": "ElasticSearch",
|
||||
"Service": "elasticsearch"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"opensearch_service_domains_audit_logging_enabled",
|
||||
"opensearch_service_domains_audit_logging_enabled",
|
||||
"opensearch_service_domains_https_communications_enforced"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "guardduty",
|
||||
"Name": "Benchmark: GuardDuty",
|
||||
"Description": "This section contains recommendations for configuring AWS GuardDuty resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "guardduty",
|
||||
"Section": "GuardDuty",
|
||||
"Service": "guardduty"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "iam",
|
||||
"Name": "Benchmark: IAM",
|
||||
"Description": "This section contains recommendations for configuring AWS IAM resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "iam",
|
||||
"Section": "IAM",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_no_root_access_key",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_disable_90_days_credentials",
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "kinesis",
|
||||
"Name": "Benchmark: Kinesis",
|
||||
"Description": "This section contains recommendations for configuring AWS Kinesis resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "kinesis",
|
||||
"Section": "Kinesis",
|
||||
"Service": "kinesis"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "kms",
|
||||
"Name": "Benchmark: KMS",
|
||||
"Description": "This section contains recommendations for configuring AWS KMS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "kms",
|
||||
"Section": "KMS",
|
||||
"Service": "kms"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "lambda",
|
||||
"Name": "Benchmark: Lambda",
|
||||
"Description": "This section contains recommendations for configuring Lambda resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "lambda",
|
||||
"Section": "Lambda",
|
||||
"Service": "lambda"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"awslambda_function_url_public",
|
||||
"awslambda_function_using_supported_runtimes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "network-firewall",
|
||||
"Name": "Benchmark: Network Firewall",
|
||||
"Description": "This section contains recommendations for configuring Network Firewall resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "network-firewall",
|
||||
"Section": "Network Firewall",
|
||||
"Service": "network-firewall"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "opensearch",
|
||||
"Name": "Benchmark: OpenSearch",
|
||||
"Description": "This section contains recommendations for configuring OpenSearch resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "opensearch",
|
||||
"Section": "OpenSearch",
|
||||
"Service": "opensearch"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"opensearch_service_domains_not_publicly_accessible"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "rds",
|
||||
"Name": "Benchmark: RDS",
|
||||
"Description": "This section contains recommendations for configuring AWS RDS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "rds",
|
||||
"Section": "RDS",
|
||||
"Service": "rds"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_snapshots_public_access",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_enhanced_monitoring_enabled",
|
||||
"rds_instance_deletion_protection",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"rds_instance_minor_version_upgrade_enabled",
|
||||
"rds_instance_multi_az"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "redshift",
|
||||
"Name": "Benchmark: Redshift",
|
||||
"Description": "This section contains recommendations for configuring AWS Redshift resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "redshift",
|
||||
"Section": "Redshift",
|
||||
"Service": "redshift"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"redshift_cluster_public_access",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"redshift_cluster_automatic_upgrades"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "s3",
|
||||
"Name": "Benchmark: S3",
|
||||
"Description": "This section contains recommendations for configuring AWS S3 resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "s3",
|
||||
"Section": "S3",
|
||||
"Service": "s3"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"s3_bucket_object_versioning",
|
||||
"s3_bucket_acl_prohibited"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sagemaker",
|
||||
"Name": "Benchmark: SageMaker",
|
||||
"Description": "This section contains recommendations for configuring AWS Sagemaker resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sagemaker",
|
||||
"Section": "SageMaker",
|
||||
"Service": "sagemaker"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"sagemaker_notebook_instance_vpc_settings_configured",
|
||||
"sagemaker_notebook_instance_root_access_disabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "secretsmanager",
|
||||
"Name": "Benchmark: Secrets Manager",
|
||||
"Description": "This section contains recommendations for configuring AWS Secrets Manager resources.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "secretsmanager",
|
||||
"Section": "Secrets Manager",
|
||||
"Service": "secretsmanager"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"secretsmanager_automatic_rotation_enabled",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sns",
|
||||
"Name": "Benchmark: SNS",
|
||||
"Description": "This section contains recommendations for configuring AWS SNS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sns",
|
||||
"Section": "SNS",
|
||||
"Service": "sns"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sqs",
|
||||
"Name": "Benchmark: SQS",
|
||||
"Description": "This section contains recommendations for configuring AWS SQS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sqs",
|
||||
"Section": "SQS",
|
||||
"Service": "sqs"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"sqs_queues_server_side_encryption_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ssm",
|
||||
"Name": "Benchmark: SSM",
|
||||
"Description": "This section contains recommendations for configuring AWS Systems Manager resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ssm",
|
||||
"Section": "SSM",
|
||||
"Service": "ssm"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "waf",
|
||||
"Name": "Benchmark: WAF",
|
||||
"Description": "This section contains recommendations for configuring AWS WAF resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "waf",
|
||||
"Section": "WAF",
|
||||
"Service": "waf"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,6 +1,8 @@
|
||||
{
|
||||
"Framework": "CIS-AWS",
|
||||
"Framework": "CIS",
|
||||
"Version": "1.4",
|
||||
"Provider": "AWS",
|
||||
"Description": "The CIS Benchmark for CIS Amazon Web Services Foundations Benchmark, v1.4.0, Level 1 and 2 provides prescriptive guidance for configuring security options for a subset of Amazon Web Services. It has an emphasis on foundational, testable, and architecture agnostic settings",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "1.1",
|
||||
@@ -258,7 +260,7 @@
|
||||
"Id": "1.20",
|
||||
"Description": "Ensure that IAM Access analyzer is enabled for all regions",
|
||||
"Checks": [
|
||||
"accessanalyzer_enabled_without_findings"
|
||||
"accessanalyzer_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
@@ -531,6 +533,7 @@
|
||||
"Id": "2.1.5",
|
||||
"Description": "Ensure that S3 Buckets are configured with 'Block public access (bucket settings)'",
|
||||
"Checks": [
|
||||
"s3_bucket_level_public_access_block",
|
||||
"s3_account_level_public_access_blocks"
|
||||
],
|
||||
"Attributes": [
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
{
|
||||
"Framework": "CIS-AWS",
|
||||
"Framework": "CIS",
|
||||
"Version": "1.5",
|
||||
"Provider": "AWS",
|
||||
"Description": "The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services with an emphasis on foundational, testable, and architecture agnostic settings.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "1.1",
|
||||
@@ -258,7 +260,7 @@
|
||||
"Id": "1.20",
|
||||
"Description": "Ensure that IAM Access analyzer is enabled for all regions",
|
||||
"Checks": [
|
||||
"accessanalyzer_enabled_without_findings"
|
||||
"accessanalyzer_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
@@ -531,6 +533,7 @@
|
||||
"Id": "2.1.5",
|
||||
"Description": "Ensure that S3 Buckets are configured with 'Block public access (bucket settings)'",
|
||||
"Checks": [
|
||||
"s3_bucket_level_public_access_block",
|
||||
"s3_account_level_public_access_blocks"
|
||||
],
|
||||
"Attributes": [
|
||||
|
||||
421
prowler/compliance/aws/cisa_aws.json
Normal file
@@ -0,0 +1,421 @@
|
||||
{
|
||||
"Framework": "CISA",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "Cybersecurity & Infrastructure Security Agency's (CISA) Cyber Essentials is a guide for leaders of small businesses as well as leaders of small and local government agencies to develop an actionable understanding of where to start implementing organizational cybersecurity practices.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "your-systems-1",
|
||||
"Name": "Your Systems-1",
|
||||
"Description": "Learn what is on your network. Maintain inventories of hardware and software assets to know what is in play and at-risk from attack.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-systems-1",
|
||||
"Section": "your systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ec2_instance_older_than_specific_days",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ec2_elastic_ip_unassgined"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-systems-2",
|
||||
"Name": "Your Systems-2",
|
||||
"Description": "Leverage automatic updates for all operating systems and third-party software.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-systems-2",
|
||||
"Section": "your systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_minor_version_upgrade_enabled",
|
||||
"redshift_cluster_automatic_upgrades",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-systems-3",
|
||||
"Name": "Your Systems-3",
|
||||
"Description": "Implement security configurations for all hardware and software assets.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-systems-3",
|
||||
"Section": "your systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_client_certificate_enabled",
|
||||
"apigateway_logging_enabled",
|
||||
"apigateway_waf_acl_attached",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"codebuild_project_user_controlled_buildspec",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_ebs_default_encryption",
|
||||
"ec2_instance_public_ip",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"elb_logging_enabled",
|
||||
"elbv2_deletion_protection",
|
||||
"elbv2_waf_acl_attached",
|
||||
"elbv2_ssl_listeners",
|
||||
"elb_ssl_listeners",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_no_custom_policy_permissive_role_assumption",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_disable_90_days_credentials",
|
||||
"kms_cmk_rotation_enabled",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_enhanced_monitoring_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_deletion_protection",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"redshift_cluster_audit_logging",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_object_versioning",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"secretsmanager_automatic_rotation_enabled",
|
||||
"securityhub_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled",
|
||||
"vpc_endpoint_connections_trust_boundaries",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your_-urroundings-1",
|
||||
"Name": "Your Surroundings-1",
|
||||
"Description": "Learn who is on your network. Maintain inventories of network connections (user accounts, vendors, business partners, etc.).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-surroundings-1",
|
||||
"Section": "your surroundings",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_elastic_ip_unassgined",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-surroundings-2",
|
||||
"Name": "Your Surroundings-2",
|
||||
"Description": "Leverage multi-factor authentication for all users, starting with privileged, administrative and remote access users.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-surroundings-2",
|
||||
"Section": "your surroundings",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_user_mfa_enabled_console_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-surroundings-3",
|
||||
"Name": "Your Surroundings-3",
|
||||
"Description": "Grant access and admin permissions based on need-to-know and least privilege.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-surroundings-3",
|
||||
"Section": "your surroundings",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"elbv2_ssl_listeners",
|
||||
"iam_no_custom_policy_permissive_role_assumption",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-surroundings-4",
|
||||
"Name": "Your Surroundings-4",
|
||||
"Description": "Leverage unique passwords for all user accounts.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-surroundings-4",
|
||||
"Section": "your surroundings",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-data-1",
|
||||
"Name": "Your Data-1",
|
||||
"Description": "Learn how your data is protected.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-data-1",
|
||||
"Section": "your data",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-data-2",
|
||||
"Name": "Your Data-2",
|
||||
"Description": "Learn what is happening on your network, manage network and perimeter components, host and device components, data-at-rest and in-transit, and user behavior activities.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-data-2",
|
||||
"Section": "your data",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"apigateway_client_certificate_enabled",
|
||||
"apigateway_logging_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_instance_public_ip",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"elb_logging_enabled",
|
||||
"elbv2_waf_acl_attached",
|
||||
"elbv2_ssl_listeners",
|
||||
"elb_ssl_listeners",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_audit_logging",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_acl_prohibited",
|
||||
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"vpc_flow_logs_enabled",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-data-3",
|
||||
"Name": "Your Data-3",
|
||||
"Description": "Domain name system protection.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-data-3",
|
||||
"Section": "your data",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"elbv2_waf_acl_attached"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-data-4",
|
||||
"Name": "Your Data-4",
|
||||
"Description": "Establish regular automated backups and redundancies of key systems.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-data-4",
|
||||
"Section": "your data",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"elbv2_deletion_protection",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_deletion_protection",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "your-data-5",
|
||||
"Name": "Your Data-5",
|
||||
"Description": "Leverage protections for backups, including physical security, encryption and offline copies.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-data-5",
|
||||
"Section": "your data",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "your-crisis-response-2",
|
||||
"Name": "Your Crisis Response-2",
|
||||
"Description": "Lead development of an internal reporting structure to detect, communicate and contain attacks.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "your-crisis-response-2",
|
||||
"Section": "your crisis response",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "booting-up-thing-to-do-first-1",
|
||||
"Name": "Booting Up: Things to Do First-1",
|
||||
"Description": "Lead development of an internal reporting structure to detect, communicate and contain attacks.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "booting-up-thing-to-do-first-1",
|
||||
"Section": "booting up thing to do first",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "booting-up-thing-to-do-first-2",
|
||||
"Name": "Booting Up: Things to Do First-2",
|
||||
"Description": "Require multi-factor authentication (MFA) for accessing your systems whenever possible. MFA should be required of all users, but start with privileged, administrative, and remote access users.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "booting-up-thing-to-do-first-2",
|
||||
"Section": "booting up thing to do first",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_user_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_hardware_mfa_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "booting-up-thing-to-do-first-3",
|
||||
"Name": "Booting Up: Things to Do First-3",
|
||||
"Description": "Enable automatic updates whenever possible. Replace unsupported operating systems, applications and hardware. Test and deploy patches quickly.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "booting-up-thing-to-do-first-3",
|
||||
"Section": "booting up thing to do first",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_minor_version_upgrade_enabled",
|
||||
"redshift_cluster_automatic_upgrades",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
440
prowler/compliance/aws/fedramp_low_revision_4_aws.json
Normal file
@@ -0,0 +1,440 @@
|
||||
{
|
||||
"Framework": "FedRAMP-Low-Revision-4",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "The Federal Risk and Authorization Management Program (FedRAMP) was established in 2011. It provides a cost-effective, risk-based approach for the adoption and use of cloud services by the U.S. federal government. FedRAMP empowers federal agencies to use modern cloud technologies, with an emphasis on the security and protection of federal information.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "ac-2",
|
||||
"Name": "Account Management (AC-2)",
|
||||
"Description": "Manage system accounts, group memberships, privileges, workflow, notifications, deactivations, and authorizations.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ac-2",
|
||||
"Section": "Access Control (AC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_hardware_mfa_enabled",
|
||||
"iam_disable_90_days_credentials",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ac-3",
|
||||
"Name": "Account Management (AC-3)",
|
||||
"Description": "The information system enforces approved authorizations for logical access to information and system resources in accordance with applicable access control policies.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ac-3",
|
||||
"Section": "Access Control (AC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_disable_90_days_credentials",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ac-17",
|
||||
"Name": "Remote Access (AC-17)",
|
||||
"Description": "Authorize remote access systems prior to connection. Enforce remote connection requirements to information systems.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ac-17",
|
||||
"Section": "Access Control (AC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"elb_ssl_listeners",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"guardduty_is_enabled",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"securityhub_enabled",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "au-2",
|
||||
"Name": "Audit Events (AU-2)",
|
||||
"Description": "The organization: a. Determines that the information system is capable of auditing the following events: [Assignment: organization-defined auditable events]; b. Coordinates the security audit function with other organizational entities requiring audit- related information to enhance mutual support and to help guide the selection of auditable events; c. Provides a rationale for why the auditable events are deemed to be adequate support after- the-fact investigations of security incidents",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "au-2",
|
||||
"Section": "Audit and Accountability (AU)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "au-9",
|
||||
"Name": "Protection of Audit Information (AU-9)",
|
||||
"Description": "The information system protects audit information and audit tools from unauthorized access, modification, and deletion.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "au-9",
|
||||
"Section": "Audit and Accountability (AU)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "au-11",
|
||||
"Name": "Audit Record Retention (AU-11)",
|
||||
"Description": "The organization retains audit records for at least 90 days to provide support for after-the-fact investigations of security incidents and to meet regulatory and organizational information retention requirements.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "au-11",
|
||||
"Section": "Audit and Accountability (AU)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ca-7",
|
||||
"Name": "Continuous Monitoring (CA-7)",
|
||||
"Description": "Continuously monitor configuration management processes. Determine security impact, environment and operational risks.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ca-7",
|
||||
"Section": "Security Assessment And Authorization (CA)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"elbv2_waf_acl_attached",
|
||||
"guardduty_is_enabled",
|
||||
"rds_instance_enhanced_monitoring_enabled",
|
||||
"redshift_cluster_audit_logging",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cm-2",
|
||||
"Name": "Baseline Configuration (CM-2)",
|
||||
"Description": "The organization develops, documents, and maintains under configuration control, a current baseline configuration of the information system.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cm-2",
|
||||
"Section": "Configuration Management (CM)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_waf_acl_attached",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"ec2_instance_older_than_specific_days",
|
||||
"elbv2_deletion_protection",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cm-8",
|
||||
"Name": "Information System Component Inventory (CM-8)",
|
||||
"Description": "The organization develops and documents an inventory of information system components that accurately reflects the current information system, includes all components within the authorization boundary of the information system, is at the level of granularity deemed necessary for tracking and reporting and reviews and updates the information system component inventory.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cm-8",
|
||||
"Section": "Configuration Management (CM)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"guardduty_is_enabled",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cp-9",
|
||||
"Name": "Information System Backup (CP-9)",
|
||||
"Description": "The organization conducts backups of user-level information, system-level information and information system documentation including security-related documentation contained in the information system and protects the confidentiality, integrity, and availability of backup information at storage locations.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cp-9",
|
||||
"Section": "Contingency Planning (CP)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cp-10",
|
||||
"Name": "Information System Recovery And Reconstitution (CP-10)",
|
||||
"Description": "The organization provides for the recovery and reconstitution of the information system to a known state after a disruption, compromise, or failure.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cp-10",
|
||||
"Section": "Contingency Planning (CP)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"elbv2_deletion_protection",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ia-2",
|
||||
"Name": "Identification and Authentication (Organizational users) (IA-2)",
|
||||
"Description": "The information system uniquely identifies and authenticates organizational users (or processes acting on behalf of organizational users).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ia-2",
|
||||
"Section": "Identification and Authentication (IA)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ir-4",
|
||||
"Name": "Incident Handling (IR-4)",
|
||||
"Description": "The organization implements an incident handling capability for security incidents that includes preparation, detection and analysis, containment, eradication, and recovery, coordinates incident handling activities with contingency planning activities and incorporates lessons learned from ongoing incident handling activities into incident response procedures, training, and testing, and implements the resulting changes accordingly.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ir-4",
|
||||
"Section": "Incident Response (IR)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sa-3",
|
||||
"Name": "System Development Life Cycle (SA-3)",
|
||||
"Description": "The organization manages the information system using organization-defined system development life cycle, defines and documents information security roles and responsibilities throughout the system development life cycle, identifies individuals having information security roles and responsibilities and integrates the organizational information security risk management process into system development life cycle activities.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sa-3",
|
||||
"Section": "System and Services Acquisition (SA)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sc-5",
|
||||
"Name": "Denial Of Service Protection (SC-5)",
|
||||
"Description": "The information system protects against or limits the effects of the following types of denial of service attacks: [Assignment: organization-defined types of denial of service attacks or references to sources for such information] by employing [Assignment: organization-defined security safeguards].",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sc-5",
|
||||
"Section": "System and Communications Protection (SC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"elbv2_deletion_protection",
|
||||
"guardduty_is_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_deletion_protection",
|
||||
"rds_instance_multi_az",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sc-7",
|
||||
"Name": "Boundary Protection (SC-7)",
|
||||
"Description": "The information system: a. Monitors and controls communications at the external boundary of the system and at key internal boundaries within the system; b. Implements subnetworks for publicly accessible system components that are [Selection: physically; logically] separated from internal organizational networks; and c. Connects to external networks or information systems only through managed interfaces consisting of boundary protection devices arranged in accordance with an organizational security architecture.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sc-7",
|
||||
"Section": "System and Communications Protection (SC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"elbv2_waf_acl_attached",
|
||||
"elb_ssl_listeners",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sc-12",
|
||||
"Name": "Cryptographic Key Establishment And Management (SC-12)",
|
||||
"Description": "The organization establishes and manages cryptographic keys for required cryptography employed within the information system in accordance with [Assignment: organization-defined requirements for key generation, distribution, storage, access, and destruction].",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sc-12",
|
||||
"Section": "System and Communications Protection (SC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"kms_cmk_rotation_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sc-13",
|
||||
"Name": "Use of Cryptography (SC-13)",
|
||||
"Description": "The information system implements FIPS-validated or NSA-approved cryptography in accordance with applicable federal laws, Executive Orders, directives, policies, regulations, and standards.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sc-13",
|
||||
"Section": "System and Communications Protection (SC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_default_encryption",
|
||||
"sagemaker_training_jobs_volume_and_output_encryption_enabled",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
1431
prowler/compliance/aws/fedramp_moderate_revision_4_aws.json
Normal file
902
prowler/compliance/aws/ffiec_aws.json
Normal file
@@ -0,0 +1,902 @@
|
||||
{
|
||||
"Framework": "FFIEC",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "In light of the increasing volume and sophistication of cyber threats, the Federal Financial Institutions Examination Council (FFIEC) developed the Cybersecurity Assessment Tool (Assessment), on behalf of its members, to help institutions identify their risks and determine their cybersecurity maturity.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "d1-g-it-b-1",
|
||||
"Name": "D1.G.IT.B.1",
|
||||
"Description": "An inventory of organizational assets (e.g., hardware, software, data, and systems hosted externally) is maintained.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d1-g-it-b-1",
|
||||
"Section": "Cyber Risk Management and Oversight (Domain 1)",
|
||||
"SubSection": "Governance (G)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ec2_instance_older_than_specific_days",
|
||||
"ec2_elastic_ip_unassgined"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d1-rm-ra-b-2",
|
||||
"Name": "D1.RM.RA.B.2",
|
||||
"Description": "The risk assessment identifies Internet-based systems and high-risk transactions that warrant additional authentication controls.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d1-rm-ra-b-2",
|
||||
"Section": "Cyber Risk Management and Oversight (Domain 1)",
|
||||
"SubSection": "Risk Management (RM)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d1-rm-rm-b-1",
|
||||
"Name": "D1.RM.Rm.B.1",
|
||||
"Description": "An information security and business continuity risk management function(s) exists within the institution.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d1-rm-rm-b-1",
|
||||
"Section": "Cyber Risk Management and Oversight (Domain 1)",
|
||||
"SubSection": "Risk Management (RM)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_multi_az",
|
||||
"redshift_cluster_automated_snapshot"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d2-is-is-b-1",
|
||||
"Name": "D2.IS.Is.B.1",
|
||||
"Description": "Information security threats are gathered and shared with applicable internal employees.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d2-is-is-b-1",
|
||||
"Section": "Threat Intelligence and Collaboration (Domain 2)",
|
||||
"SubSection": "Information Sharing (IS)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d2-ma-ma-b-1",
|
||||
"Name": "D2.MA.Ma.B.1",
|
||||
"Description": "Audit log records and other security event logs are reviewed and retained in a secure manner.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d2-ma-ma-b-1",
|
||||
"Section": "Threat Intelligence and Collaboration (Domain 2)",
|
||||
"SubSection": "Monitoring and Analyzing (MA)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d2-ma-ma-b-2",
|
||||
"Name": "D2.MA.Ma.B.2",
|
||||
"Description": "Computer event logs are used for investigations once an event has occurred.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d2-ma-ma-b-2",
|
||||
"Section": "Threat Intelligence and Collaboration (Domain 2)",
|
||||
"SubSection": "Monitoring and Analyzing (MA)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d2-ti-ti-b-1",
|
||||
"Name": "D2.TI.Ti.B.1",
|
||||
"Description": "The institution belongs or subscribes to a threat and vulnerability information-sharing source(s) that provides information on threats (e.g., FS-ISAC, US-CERT).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d2-ti-ti-b-1",
|
||||
"Section": "Threat Intelligence and Collaboration (Domain 2)",
|
||||
"SubSection": "Threat Intelligence (TI)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d2-ti-ti-b-2",
|
||||
"Name": "D2.TI.Ti.B.2",
|
||||
"Description": "Threat information is used to monitor threats and vulnerabilities.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d2-ti-ti-b-2",
|
||||
"Section": "Threat Intelligence and Collaboration (Domain 2)",
|
||||
"SubSection": "Threat Intelligence (TI)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d2-ti-ti-b-3",
|
||||
"Name": "D2.TI.Ti.B.3",
|
||||
"Description": "Threat information is used to enhance internal risk management and controls.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d2-ti-ti-b-3",
|
||||
"Section": "Threat Intelligence and Collaboration (Domain 2)",
|
||||
"SubSection": "Threat Intelligence (TI)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-cc-pm-b-1",
|
||||
"Name": "D3.CC.PM.B.1",
|
||||
"Description": "A patch management program is implemented and ensures that software and firmware patches are applied in a timely manner.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-cc-pm-b-1",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Corrective Controls (CC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_minor_version_upgrade_enabled",
|
||||
"redshift_cluster_automatic_upgrades",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-cc-pm-b-3",
|
||||
"Name": "D3.CC.PM.B.3",
|
||||
"Description": "Patch management reports are reviewed and reflect missing security patches.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-cc-pm-b-3",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Corrective Controls (CC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_minor_version_upgrade_enabled",
|
||||
"redshift_cluster_automatic_upgrades",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-an-b-1",
|
||||
"Name": "D3.DC.An.B.1",
|
||||
"Description": "The institution is able to detect anomalous activities through monitoring across the environment.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-an-b-1",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-an-b-2",
|
||||
"Name": "D3.DC.An.B.2",
|
||||
"Description": "Customer transactions generating anomalous activity alerts are monitored and reviewed.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-an-b-2",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-an-b-3",
|
||||
"Name": "D3.DC.An.B.3",
|
||||
"Description": "Logs of physical and/or logical access are reviewed following events.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-an-b-3",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-an-b-4",
|
||||
"Name": "D3.DC.An.B.4",
|
||||
"Description": "Access to critical systems by third parties is monitored for unauthorized or unusual activity.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-an-b-4",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-an-b-5",
|
||||
"Name": "D3.DC.An.B.5",
|
||||
"Description": "Elevated privileges are monitored.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-an-b-5",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-ev-b-1",
|
||||
"Name": "D3.DC.Ev.B.1",
|
||||
"Description": "A normal network activity baseline is established.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-ev-b-1",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"redshift_cluster_audit_logging",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-ev-b-2",
|
||||
"Name": "D3.DC.Ev.B.2",
|
||||
"Description": "Mechanisms (e.g., antivirus alerts, log event alerts) are in place to alert management to potential attacks.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-ev-b-2",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-ev-b-3",
|
||||
"Name": "D3.DC.Ev.B.3",
|
||||
"Description": "Processes are in place to monitor for the presence of unauthorized users, devices, connections, and software.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-ev-b-3",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-dc-th-b-1",
|
||||
"Name": "D3.DC.Th.B.1",
|
||||
"Description": "Independent testing (including penetration testing and vulnerability scanning) is conducted according to the risk assessment for external-facing systems and the internal network.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-dc-th-b-1",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Detective Controls (DC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-1",
|
||||
"Name": "D3.PC.Am.B.1",
|
||||
"Description": "Employee access is granted to systems and confidential data based on job responsibilities and the principles of least privilege.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-1",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_profile_attached",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-10",
|
||||
"Name": "D3.PC.Am.B.10",
|
||||
"Description": "Production and non-production environments are segregated to prevent unauthorized access or changes to information assets. (*N/A if no production environment exists at the institution or the institution's third party.)",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-10",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-12",
|
||||
"Name": "D3.PC.Am.B.12",
|
||||
"Description": "All passwords are encrypted in storage and in transit.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-12",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_client_certificate_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-13",
|
||||
"Name": "D3.PC.Am.B.13",
|
||||
"Description": "Confidential data is encrypted when transmitted across public or untrusted networks (e.g., Internet).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-13",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_client_certificate_enabled",
|
||||
"elbv2_insecure_ssl_ciphers",
|
||||
"elb_ssl_listeners",
|
||||
"s3_bucket_secure_transport_policy"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-15",
|
||||
"Name": "D3.PC.Am.B.15",
|
||||
"Description": "Remote access to critical systems by employees, contractors, and third parties uses encrypted connections and multifactor authentication.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-15",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_client_certificate_enabled",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"s3_bucket_secure_transport_policy"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-16",
|
||||
"Name": "D3.PC.Am.B.16",
|
||||
"Description": "Administrative, physical, or technical controls are in place to prevent users without administrative responsibilities from installing unauthorized software.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-16",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-2",
|
||||
"Name": "D3.PC.Am.B.2",
|
||||
"Description": "Employee access to systems and confidential data provides for separation of duties.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-2",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-3",
|
||||
"Name": "D3.PC.Am.B.3",
|
||||
"Description": "Elevated privileges (e.g., administrator privileges) are limited and tightly controlled (e.g., assigned to individuals, not shared, and require stronger password controls).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-3",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-6",
|
||||
"Name": "D3.PC.Am.B.6",
|
||||
"Description": "Identification and authentication are required and managed for access to systems, applications, and hardware.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-6",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_disable_90_days_credentials"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-7",
|
||||
"Name": "D3.PC.Am.B.7",
|
||||
"Description": "Access controls include password complexity and limits to password attempts and reuse.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-7",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-am-b-8",
|
||||
"Name": "D3.PC.Am.B.8",
|
||||
"Description": "All default passwords and unnecessary default accounts are changed before system implementation.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-am-b-8",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-im-b-1",
|
||||
"Name": "D3.PC.Im.B.1",
|
||||
"Description": "Network perimeter defense tools (e.g., border router and firewall) are used.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-im-b-1",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"apigateway_waf_acl_attached",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"elbv2_waf_acl_attached",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-im-b-2",
|
||||
"Name": "D3.PC.Im.B.2",
|
||||
"Description": "Systems that are accessed from the Internet or by external parties are protected by firewalls or other similar devices.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-im-b-2",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_waf_acl_attached",
|
||||
"elbv2_waf_acl_attached",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-im-b-3",
|
||||
"Name": "D3.PC.Im.B.3",
|
||||
"Description": "All ports are monitored.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-im-b-3",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-im-b-5",
|
||||
"Name": "D3.PC.Im.B.5",
|
||||
"Description": "Systems configurations (for servers, desktops, routers, etc.) follow industry standards and are enforced",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-im-b-5",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-im-b-6",
|
||||
"Name": "D3.PC.Im.B.6",
|
||||
"Description": "Ports, functions, protocols and services are prohibited if no longer needed for business purposes.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-im-b-6",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-im-b-7",
|
||||
"Name": "D3.PC.Im.B.7",
|
||||
"Description": "Access to make changes to systems configurations (including virtual machines and hypervisors) is controlled and monitored.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-im-b-7",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d3-pc-se-b-1",
|
||||
"Name": "D3.PC.Se.B.1",
|
||||
"Description": "Developers working for the institution follow secure program coding practices, as part of a system development life cycle (SDLC), that meet industry standards.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d3-pc-se-b-1",
|
||||
"Section": "Cybersecurity Controls (Domain 3)",
|
||||
"SubSection": "Preventative Controls (PC)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "d4-c-co-b-2",
|
||||
"Name": "D4.C.Co.B.2",
|
||||
"Description": "The institution ensures that third-party connections are authorized.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d4-c-co-b-2",
|
||||
"Section": "External Dependency Management (Domain 4)",
|
||||
"SubSection": "Connections (C)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d5-dr-de-b-1",
|
||||
"Name": "D5.DR.De.B.1",
|
||||
"Description": "Alert parameters are set for detecting information security incidents that prompt mitigating actions.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d5-dr-de-b-1",
|
||||
"Section": "Cyber Incident Management and Resilience (Domain 5)",
|
||||
"SubSection": "Detection, Response, & Mitigation (DR)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d5-dr-de-b-2",
|
||||
"Name": "D5.DR.De.B.2",
|
||||
"Description": "System performance reports contain information that can be used as a risk indicator to detect information security incidents.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d5-dr-de-b-2",
|
||||
"Section": "Cyber Incident Management and Resilience (Domain 5)",
|
||||
"SubSection": "Detection, Response, & Mitigation (DR)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "d5-dr-de-b-3",
|
||||
"Name": "D5.DR.De.B.3",
|
||||
"Description": "Tools and processes are in place to detect, alert, and trigger the incident response program.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d5-dr-de-b-3",
|
||||
"Section": "Cyber Incident Management and Resilience (Domain 5)",
|
||||
"SubSection": "Detection, Response, & Mitigation (DR)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d5-er-es-b-4",
|
||||
"Name": "D5.ER.Es.B.4",
|
||||
"Description": "Incidents are classified, logged and tracked.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d5-er-es-b-4",
|
||||
"Section": "Cyber Incident Management and Resilience (Domain 5)",
|
||||
"SubSection": "Escalation and Reporting (ER)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_no_high_severity_findings"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "d5-ir-pl-b-6",
|
||||
"Name": "D5.IR.Pl.B.6",
|
||||
"Description": "The institution plans to use business continuity, disaster recovery, and data backup programs to recover operations following an incident.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "d5-ir-pl-b-6",
|
||||
"Section": "Cyber Incident Management and Resilience (Domain 5)",
|
||||
"SubSection": "Incident Resilience Planning & Strategy (IR)",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"elbv2_deletion_protection",
|
||||
"rds_instance_enhanced_monitoring_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_deletion_protection",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_backup_enabled",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
126
prowler/compliance/aws/gdpr_aws.json
Normal file
@@ -0,0 +1,126 @@
|
||||
{
|
||||
"Framework": "GDPR",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "The General Data Protection Regulation (GDPR) is a new European privacy law that became enforceable on May 25, 2018. The GDPR replaces the EU Data Protection Directive, also known as Directive 95/46/EC. It's intended to harmonize data protection laws throughout the European Union (EU). It does this by applying a single data protection law that's binding throughout each EU member state.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "article_25",
|
||||
"Name": "Article 25 Data protection by design and by default",
|
||||
"Description": "To obtain the latest version of the official guide, please visit https://gdpr-info.eu/art-25-gdpr/. Taking into account the state of the art, the cost of implementation and the nature, scope, context and purposes of processing as well as the risks of varying likelihood and severity for rights and freedoms of natural persons posed by the processing, the controller shall, both at the time of the determination of the means for processing and at the time of the processing itself, implement appropriate technical and organisational measures, such as pseudonymisation, which are designed to implement data-protection principles, such as data minimisation, in an effective manner and to integrate the necessary safeguards into the processing in order to meet the requirements of this Regulation and protect the rights of data subjects. The controller shall implement appropriate technical and organisational measures for ensuring that, by default, only personal data which are necessary for each specific purpose of the processing are processed. That obligation applies to the amount of personal data collected, the extent of their processing, the period of their storage and their accessibility. In particular, such measures shall ensure that by default personal data are not made accessible without the individual's intervention to an indefinite number of natural persons. An approved certification mechanism pursuant to Article 42 may be used as an element to demonstrate compliance with the requirements set out in paragraphs 1 and 2 of this Article.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "article_25",
|
||||
"Section": "Article 25 Data protection by design and by default",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_logs_s3_bucket_is_not_publicly_accessible",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"config_recorder_all_regions_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_support_role_created",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_disable_90_days_credentials",
|
||||
"kms_cmk_rotation_enabled",
|
||||
"cloudwatch_log_metric_filter_for_s3_bucket_policy_changes",
|
||||
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
|
||||
"cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled",
|
||||
"cloudwatch_log_metric_filter_authentication_failures",
|
||||
"cloudwatch_log_metric_filter_sign_in_without_mfa",
|
||||
"cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk",
|
||||
"cloudwatch_log_metric_filter_policy_changes",
|
||||
"cloudwatch_log_metric_filter_root_usage",
|
||||
"cloudwatch_log_metric_filter_security_group_changes",
|
||||
"cloudwatch_log_metric_filter_unauthorized_api_calls",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "article_30",
|
||||
"Name": "Article 30 Records of processing activities",
|
||||
"Description": " To obtain the latest version of the official guide, please visit https://www.privacy-regulation.eu/en/article-30-records-of-processing-activities-GDPR.htm. Each controller and, where applicable, the controller's representative, shall maintain a record of processing activities under its responsibility. That record shall contain all of the following information like the name and contact details of the controller and where applicable, the joint controller, the controller's representative and the data protection officer, the purposes of the processing etc. Each processor and where applicable, the processor's representative shall maintain a record of all categories of processing activities carried out on behalf of a controller, containing the name and contact details of the processor or processors and of each controller on behalf of which the processor is acting, and, where applicable of the controller's or the processor's representative, and the data protection officer, where applicable, transfers of personal data to a third country or an international organisation, including the identification of that third country or international organisation and, in the case of transfers referred to in the second subparagraph of Article 49(1), the documentation of suitable safeguards. The records referred to in paragraphs 1 and 2 shall be in writing, including in electronic form. The controller or the processor and, where applicable, the controller's or the processor's representative, shall make the record available to the supervisory authority on request. 
The obligations referred to in paragraphs 1 and 2 shall not apply to an enterprise or an organisation employing fewer than 250 persons unless the processing it carries out is likely to result in a risk to the rights and freedoms of data subjects, the processing is not occasional, or the processing includes special categories of data as referred to in Article 9(1) or personal data relating to criminal convictions and offences referred to in Article 10.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "article_30",
|
||||
"Section": "Article 30 Records of processing activities",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"config_recorder_all_regions_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"kms_cmk_rotation_enabled",
|
||||
"redshift_cluster_audit_logging",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "article_32",
|
||||
"Name": "Article 32 Security of processing",
|
||||
"Description": " To obtain the latest version of the official guide, please visit https://gdpr-info.eu/art-32-gdpr/. Taking into account the state of the art, the costs of implementation and the nature, scope, context and purposes of processing as well as the risk of varying likelihood and severity for the rights and freedoms of natural persons, the controller and the processor shall implement appropriate technical and organisational measures to ensure a level of security appropriate to the risk, including inter alia as appropriate. In assessing the appropriate level of security account shall be taken in particular of the risks that are presented by processing, in particular from accidental or unlawful destruction, loss, alteration, unauthorised disclosure of, or access to personal data transmitted, stored or otherwise processed. Adherence to an approved code of conduct as referred to in Article 40 or an approved certification mechanism as referred to in Article 42 may be used as an element by which to demonstrate compliance with the requirements set out in paragraph 1 of this Article. The controller and processor shall take steps to ensure that any natural person acting under the authority of the controller or the processor who has access to personal data does not process them except on instructions from the controller, unless he or she is required to do so by Union or Member State law.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "article_32",
|
||||
"Section": "Article 32 Security of processing",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"cloudfront_distributions_https_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"elb_ssl_listeners",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
347
prowler/compliance/aws/gxp_21_cfr_part_11_aws.json
Normal file
@@ -0,0 +1,347 @@
|
||||
{
|
||||
"Framework": "GxP-21-CFR-Part-11",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "GxP refers to the regulations and guidelines that are applicable to life sciences organizations that make food and medical products. Medical products that fall under this include medicines, medical devices, and medical software applications. The overall intent of GxP requirements is to ensure that food and medical products are safe for consumers. It's also to ensure the integrity of data that's used to make product-related safety decisions.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "11.10-a",
|
||||
"Name": "11.10(a)",
|
||||
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (a) Validation of systems to ensure accuracy, reliability, consistent intended performance, and the ability to discern invalid or altered records.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.10-a",
|
||||
"Section": "11.10 Controls for closed systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ec2_instance_older_than_specific_days",
|
||||
"elbv2_deletion_protection",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_deletion_protection",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_multi_az",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.10-c",
|
||||
"Name": "11.10(c)",
|
||||
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (c) Protection of records to enable their accurate and ready retrieval throughout the records retention period.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.10-c",
|
||||
"Section": "11.10 Controls for closed systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_audit_logging",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_object_versioning",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"sagemaker_notebook_instance_encryption_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.10-d",
|
||||
"Name": "11.10(d)",
|
||||
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (d) Limiting system access to authorized individuals.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.10-d",
|
||||
"Section": "11.10 Controls for closed systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_profile_attached",
|
||||
"ec2_instance_public_ip",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_disable_90_days_credentials",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"secretsmanager_automatic_rotation_enabled",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.10-e",
|
||||
"Name": "11.10(e)",
|
||||
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (e) Use of secure, computer-generated, time-stamped audit trails to independently record the date and time of operator entries and actions that create, modify, or delete electronic records. Record changes shall not obscure previously recorded information. Such audit trail documentation shall be retained for a period at least as long as that required for the subject electronic records and shall be available for agency review and copying.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.10-d",
|
||||
"Section": "11.10 Controls for closed systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.10-g",
|
||||
"Name": "11.10(g)",
|
||||
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (g) Use of authority checks to ensure that only authorized individuals can use the system, electronically sign a record, access the operation or computer system input or output device, alter a record, or perform the operation at hand.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.10-g",
|
||||
"Section": "11.10 Controls for closed systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_ebs_default_encryption",
|
||||
"ec2_instance_profile_attached",
|
||||
"ec2_instance_public_ip",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_disable_90_days_credentials",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"secretsmanager_automatic_rotation_enabled",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.10-h",
|
||||
"Name": "11.10(h)",
|
||||
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (h) Use of device (e.g., terminal) checks to determine, as appropriate, the validity of the source of data input or operational instruction.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.10-h",
|
||||
"Section": "11.10 Controls for closed systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.10-k",
|
||||
"Name": "11.10(k)",
|
||||
"Description": "Persons who use closed systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, when appropriate, the confidentiality of electronic records, and to ensure that the signer cannot readily repudiate the signed record as not genuine. Such procedures and controls shall include the following: (k) Use of appropriate controls over systems documentation including: (1) Adequate controls over the distribution of, access to, and use of documentation for system operation and maintenance. (2) Revision and change control procedures to maintain an audit trail that documents time-sequenced development and modification of systems documentation.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.10-k",
|
||||
"Section": "11.10 Controls for closed systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.30",
|
||||
"Name": "11.30 Controls for open systems",
|
||||
"Description": "Persons who use open systems to create, modify, maintain, or transmit electronic records shall employ procedures and controls designed to ensure the authenticity, integrity, and, as appropriate, the confidentiality of electronic records from the point of their creation to the point of their receipt. Such procedures and controls shall include those identified in 11.10, as appropriate, and additional measures such as document encryption and use of appropriate digital signature standards to ensure, as necessary under the circumstances, record authenticity, integrity, and confidentiality.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.30",
|
||||
"Section": "11.30 Controls for open systems",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_client_certificate_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"elbv2_insecure_ssl_ciphers",
|
||||
"elb_ssl_listeners",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"kms_cmk_rotation_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.200",
|
||||
"Name": "11.200 Electronic signature components and controls",
|
||||
"Description": "(a) Electronic signatures that are not based upon biometrics shall: (1) Employ at least two distinct identification components such as an identification code and password. (i) When an individual executes a series of signings during a single, continuous period of controlled system access, the first signing shall be executed using all electronic signature components; subsequent signings shall be executed using at least one electronic signature component that is only executable by, and designed to be used only by, the individual. (ii) When an individual executes one or more signings not performed during a single, continuous period of controlled system access, each signing shall be executed using all of the electronic signature components. (2) Be used only by their genuine owners; and (3) Be administered and executed to ensure that attempted use of an individual's electronic signature by anyone other than its genuine owner requires collaboration of two or more individuals.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.200",
|
||||
"Section": "11.200 Electronic signature components and controls",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.300-b",
|
||||
"Name": "11.300(b)",
|
||||
"Description": "Persons who use electronic signatures based upon use of identification codes in combination with passwords shall employ controls to ensure their security and integrity. Such controls shall include: (b) Ensuring that identification code and password issuances are periodically checked, recalled, or revised (e.g., to cover such events as password aging).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.300-b",
|
||||
"Section": "11.300 Controls for identification codes/passwords",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_disable_90_days_credentials",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "11.300-d",
|
||||
"Name": "11.300(d)",
|
||||
"Description": "Persons who use electronic signatures based upon use of identification codes in combination with passwords shall employ controls to ensure their security and integrity. Such controls shall include: (d) Use of transaction safeguards to prevent unauthorized use of passwords and/or identification codes, and to detect and report in an immediate and urgent manner any attempts at their unauthorized use to the system security unit, and, as appropriate, to organizational management.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "11.300-d",
|
||||
"Section": "11.300 Controls for identification codes/passwords",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
281
prowler/compliance/aws/gxp_eu_annex_11_aws.json
Normal file
@@ -0,0 +1,281 @@
|
||||
{
|
||||
"Framework": "GxP-EU-Annex-11",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "The GxP EU Annex 11 framework is the European equivalent to the FDA 21 CFR part 11 framework in the United States. This annex applies to all forms of computerized systems that are used as part of Good Manufacturing Practices (GMP) regulated activities. A computerized system is a set of software and hardware components that together fulfill certain functionalities. The application should be validated and IT infrastructure should be qualified. Where a computerized system replaces a manual operation, there should be no resultant decrease in product quality, process control, or quality assurance. There should be no increase in the overall risk of the process.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "1-risk-management",
|
||||
"Name": "1 Risk Management",
|
||||
"Description": "Risk management should be applied throughout the lifecycle of the computerised system taking into account patient safety, data integrity and product quality. As part of a risk management system, decisions on the extent of validation and data integrity controls should be based on a justified and documented risk assessment of the computerised system.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "1-risk-management",
|
||||
"Section": "General",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "5-data",
|
||||
"Name": "5 Data",
|
||||
"Description": "Computerised systems exchanging data electronically with other systems should include appropriate built-in checks for the correct and secure entry and processing of data, in order to minimize the risks.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "5-data",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "7.1-data-storage-damage-protection",
|
||||
"Name": "7.1 Data Storage - Damage Protection",
|
||||
"Description": "Data should be secured by both physical and electronic means against damage. Stored data should be checked for accessibility, readability and accuracy. Access to data should be ensured throughout the retention period.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "7.1-data-storage-damage-protection",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_object_versioning",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "7.2-data-storage-backups",
|
||||
"Name": "7.2 Data Storage - Backups",
|
||||
"Description": "Regular back-ups of all relevant data should be done. Integrity and accuracy of backup data and the ability to restore the data should be checked during validation and monitored periodically.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "7.2-data-storage-backups",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_backup_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "8.2-printouts-data-changes",
|
||||
"Name": "8.2 Printouts - Data Changes",
|
||||
"Description": "For records supporting batch release it should be possible to generate printouts indicating if any of the data has been changed since the original entry.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "8.2-printouts-data-changes",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "9-audit-trails",
|
||||
"Name": "9 Audit Trails",
|
||||
"Description": "Consideration should be given, based on a risk assessment, to building into the system the creation of a record of all GMP-relevant changes and deletions (a system generated 'audit trail'). For change or deletion of GMP-relevant data the reason should be documented. Audit trails need to be available and convertible to a generally intelligible form and regularly reviewed.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "9-audit-trails",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "10-change-and-configuration-management",
|
||||
"Name": "10 Change and Configuration Management",
|
||||
"Description": "Any changes to a computerised system including system configurations should only be made in a controlled manner in accordance with a defined procedure.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "10-change-and-configuration-management",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "12.4-security-audit-trail",
|
||||
"Name": "12.4 Security - Audit Trail",
|
||||
"Description": "Management systems for data and for documents should be designed to record the identity of operators entering, changing, confirming or deleting data including date and time.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "12.4-security-audit-trail",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "16-business-continuity",
|
||||
"Name": "16 Business Continuity",
|
||||
"Description": "For the availability of computerised systems supporting critical processes, provisions should be made to ensure continuity of support for those processes in the event of a system breakdown (e.g. a manual or alternative system). The time required to bring the alternative arrangements into use should be based on risk and appropriate for a particular system and the business process it supports. These arrangements should be adequately documented and tested.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "16-business-continuity",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "17-archiving",
|
||||
"Name": "17 Archiving",
|
||||
"Description": "Data may be archived. This data should be checked for accessibility, readability and integrity. If relevant changes are to be made to the system (e.g. computer equipment or programs), then the ability to retrieve the data should be ensured and tested.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "17-archiving",
|
||||
"Section": "Operational Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.2-validation-documentation-change-control",
|
||||
"Name": "4.2 Validation - Documentation Change Control",
|
||||
"Description": "Validation documentation should include change control records (if applicable) and reports on any deviations observed during the validation process.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.2-validation-documentation-change-control",
|
||||
"Section": "Project Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.5-validation-development-quality",
|
||||
"Name": "4.5 Validation - Development Quality",
|
||||
"Description": "The regulated user should take all reasonable steps, to ensure that the system has been developed in accordance with an appropriate quality management system. The supplier should be assessed appropriately.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.5-validation-development-quality",
|
||||
"Section": "Project Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.6-validation-quality-performance",
|
||||
"Name": "4.6 Validation - Quality and Performance",
|
||||
"Description": "For the validation of bespoke or customised computerised systems there should be a process in place that ensures the formal assessment and reporting of quality and performance measures for all the life-cycle stages of the system.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.6-validation-quality-performance",
|
||||
"Section": "Project Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.8-validation-data-transfer",
|
||||
"Name": "4.8 Validation - Data Transfer",
|
||||
"Description": "If data are transferred to another data format or system, validation should include checks that data are not altered in value and/or meaning during this migration process.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "4.8-validation-data-transfer",
|
||||
"Section": "Project Phase",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
773
prowler/compliance/aws/hipaa_aws.json
Normal file
@@ -0,0 +1,773 @@
|
||||
{
|
||||
"Framework": "HIPAA",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "The Health Insurance Portability and Accountability Act of 1996 (HIPAA) is legislation that helps US workers to retain health insurance coverage when they change or lose jobs. The legislation also seeks to encourage electronic health records to improve the efficiency and quality of the US healthcare system through improved information sharing.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "164_308_a_1_ii_a",
|
||||
"Name": "164.308(a)(1)(ii)(A) Risk analysis",
|
||||
"Description": "Conduct an accurate and thorough assessment of the potential risks and vulnerabilities to the confidentiality, integrity, and availability of electronic protected health information held by the covered entity or business associate.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_1_ii_a",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled",
|
||||
"guardduty_is_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_1_ii_b",
|
||||
"Name": "164.308(a)(1)(ii)(B) Risk Management",
|
||||
"Description": "Implement security measures sufficient to reduce risks and vulnerabilities to a reasonable and appropriate level to comply with 164.306(a): Ensure the confidentiality, integrity, and availability of all electronic protected health information the covered entity or business associate creates, receives, maintains, or transmits.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_1_ii_b",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"ec2_instance_public_ip",
|
||||
"ec2_instance_older_than_specific_days",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"elbv2_deletion_protection",
|
||||
"elb_ssl_listeners",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_audit_logging",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_object_versioning",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_1_ii_d",
|
||||
"Name": "164.308(a)(1)(ii)(D) Information system activity review",
|
||||
"Description": "Implement procedures to regularly review records of information system activity, such as audit logs, access reports, and security incident tracking reports.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_1_ii_d",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_3_i",
|
||||
"Name": "164.308(a)(3)(i) Workforce security",
|
||||
"Description": "Implement policies and procedures to ensure that all members of its workforce have appropriate access to electronic protected health information, as provided under paragraph (a)(4) of this section, and to prevent those workforce members who do not have access under paragraph (a)(4) of this section from obtaining access to electronic protected health information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_3_i",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_3_ii_a",
|
||||
"Name": "164.308(a)(3)(ii)(A) Authorization and/or supervision",
|
||||
"Description": "Implement procedures for the authorization and/or supervision of workforce members who work with electronic protected health information or in locations where it might be accessed.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_3_ii_a",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_3_ii_b",
|
||||
"Name": "164.308(a)(3)(ii)(B) Workforce clearance procedure",
|
||||
"Description": "Implement procedures to determine that the access of a workforce member to electronic protected health information is appropriate.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_3_ii_b",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_disable_90_days_credentials"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_3_ii_c",
|
||||
"Name": "164.308(a)(3)(ii)(C) Termination procedures",
|
||||
"Description": "Implement procedures for terminating access to electronic protected health information when the employment of, or other arrangement with, a workforce member ends or as required by determinations made as specified in paragraph (a)(3)(ii)(b).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_3_ii_c",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_rotate_access_key_90_days"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_4_i",
|
||||
"Name": "164.308(a)(4)(i) Information access management",
|
||||
"Description": "Implement policies and procedures for authorizing access to electronic protected health information that are consistent with the applicable requirements of subpart E of this part.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_4_i",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_4_ii_a",
|
||||
"Name": "164.308(a)(4)(ii)(A) Isolating health care clearinghouse functions",
|
||||
"Description": "If a health care clearinghouse is part of a larger organization, the clearinghouse must implement policies and procedures that protect the electronic protected health information of the clearinghouse from unauthorized access by the larger organization.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_4_ii_a",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"cloudfront_distributions_https_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
|
||||
"elb_ssl_listeners",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_4_ii_b",
|
||||
"Name": "164.308(a)(4)(ii)(B) Access authorization",
|
||||
"Description": "Implement policies and procedures for granting access to electronic protected health information, for example, through access to a workstation, transaction, program, process, or other mechanism.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_4_ii_b",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_4_ii_c",
|
||||
"Name": "164.308(a)(4)(ii)(C) Access establishment and modification",
|
||||
"Description": "Implement policies and procedures that, based upon the covered entity's or the business associate's access authorization policies, establish, document, review, and modify a user's right of access to a workstation, transaction, program, or process.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_4_ii_c",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_disable_90_days_credentials",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_5_ii_b",
|
||||
"Name": "164.308(a)(5)(ii)(B) Protection from malicious software",
|
||||
"Description": "Procedures for guarding against, detecting, and reporting malicious software.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_5_ii_b",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_5_ii_c",
|
||||
"Name": "164.308(a)(5)(ii)(C) Log-in monitoring",
|
||||
"Description": "Procedures for monitoring log-in attempts and reporting discrepancies.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_5_ii_c",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"cloudwatch_log_metric_filter_authentication_failures",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_5_ii_d",
|
||||
"Name": "164.308(a)(5)(ii)(D) Password management",
|
||||
"Description": "Procedures for creating, changing, and safeguarding passwords.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_5_ii_d",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_disable_90_days_credentials"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_6_i",
|
||||
"Name": "164.308(a)(6)(i) Security incident procedures",
|
||||
"Description": "Implement policies and procedures to address security incidents.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_6_i",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"guardduty_is_enabled",
|
||||
"cloudwatch_log_metric_filter_authentication_failures",
|
||||
"cloudwatch_log_metric_filter_root_usage",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_6_ii",
|
||||
"Name": "164.308(a)(6)(ii) Response and reporting",
|
||||
"Description": "Identify and respond to suspected or known security incidents; mitigate, to the extent practicable, harmful effects of security incidents that are known to the covered entity or business associate; and document security incidents and their outcomes.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_6_ii",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"cloudwatch_log_metric_filter_authentication_failures",
|
||||
"cloudwatch_log_metric_filter_root_usage",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_7_i",
|
||||
"Name": "164.308(a)(7)(i) Contingency plan",
|
||||
"Description": "Establish (and implement as needed) policies and procedures for responding to an emergency or other occurrence (for example, fire, vandalism, system failure, and natural disaster) that damages systems that contain electronic protected health information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_7_i",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_7_ii_a",
|
||||
"Name": "164.308(a)(7)(ii)(A) Data backup plan",
|
||||
"Description": "Establish and implement procedures to create and maintain retrievable exact copies of electronic protected health information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_7_ii_a",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_7_ii_b",
|
||||
"Name": "164.308(a)(7)(ii)(B) Disaster recovery plan",
|
||||
"Description": "Establish (and implement as needed) procedures to restore any loss of data.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_7_ii_b",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_7_ii_c",
|
||||
"Name": "164.308(a)(7)(ii)(C) Emergency mode operation plan",
|
||||
"Description": "Establish (and implement as needed) procedures to enable continuation of critical business processes for protection of the security of electronic protected health information while operating in emergency mode.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_7_ii_c",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_multi_az",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_308_a_8",
|
||||
"Name": "164.308(a)(8) Evaluation",
|
||||
"Description": "Perform a periodic technical and nontechnical evaluation, based initially upon the standards implemented under this rule and subsequently, in response to environmental or operational changes affecting the security of electronic protected health information, that establishes the extent to which an entity's security policies and procedures meet the requirements of this subpart.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_308_a_8",
|
||||
"Section": "164.308 Administrative Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_a_1",
|
||||
"Name": "164.312(a)(1) Access control",
|
||||
"Description": "Implement technical policies and procedures for electronic information systems that maintain electronic protected health information to allow access only to those persons or software programs that have been granted access rights as specified in 164.308(a)(4).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_a_1",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_instance_public_ip",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_a_2_i",
|
||||
"Name": "164.312(a)(2)(i) Unique user identification",
|
||||
"Description": "Assign a unique name and/or number for identifying and tracking user identity.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_a_2_i",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"iam_no_root_access_key",
|
||||
"s3_bucket_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_a_2_ii",
|
||||
"Name": "164.312(a)(2)(ii) Emergency access procedure",
|
||||
"Description": "Establish (and implement as needed) procedures for obtaining necessary electronic protected health information during an emergency.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_a_2_ii",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_a_2_iv",
|
||||
"Name": "164.312(a)(2)(iv) Encryption and decryption",
|
||||
"Description": "Implement a mechanism to encrypt and decrypt electronic protected health information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_a_2_iv",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"kms_cmk_rotation_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_b",
|
||||
"Name": "164.312(b) Audit controls",
|
||||
"Description": "Implement hardware, software, and/or procedural mechanisms that record and examine activity in information systems that contain or use electronic protected health information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_b",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_c_1",
|
||||
"Name": "164.312(c)(1) Integrity",
|
||||
"Description": "Implement policies and procedures to protect electronic protected health information from improper alteration or destruction.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_c_1",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_c_2",
|
||||
"Name": "164.312(c)(2) Mechanism to authenticate electronic protected health information",
|
||||
"Description": "Implement electronic mechanisms to corroborate that electronic protected health information has not been altered or destroyed in an unauthorized manner.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_c_2",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_object_versioning",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_d",
|
||||
"Name": "164.312(d) Person or entity authentication",
|
||||
"Description": "Implement procedures to verify that a person or entity seeking access to electronic protected health information is the one claimed.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_d",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_mfa_enabled_console_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_e_1",
|
||||
"Name": "164.312(e)(1) Transmission security",
|
||||
"Description": "Implement technical security measures to guard against unauthorized access to electronic protected health information that is being transmitted over an electronic communications network.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_e_1",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"cloudfront_distributions_https_enabled",
|
||||
"elb_ssl_listeners",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_e_2_i",
|
||||
"Name": "164.312(e)(2)(i) Integrity controls",
|
||||
"Description": "Implement security measures to ensure that electronically transmitted electronic protected health information is not improperly modified without detection until disposed of.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_e_2_i",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elb_ssl_listeners",
|
||||
"guardduty_is_enabled",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "164_312_e_2_ii",
|
||||
"Name": "164.312(e)(2)(ii) Encryption",
|
||||
"Description": "Implement a mechanism to encrypt electronic protected health information whenever deemed appropriate.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "164_312_e_2_ii",
|
||||
"Section": "164.312 Technical Safeguards",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"dynamodb_accelerator_cluster_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_default_encryption",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"eks_cluster_kms_cmk_encryption_in_secrets_enabled",
|
||||
"elb_ssl_listeners",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_storage_encrypted",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
1096
prowler/compliance/aws/nist_800_171_revision_2_aws.json
Normal file
1247
prowler/compliance/aws/nist_800_53_revision_4_aws.json
Normal file
6291
prowler/compliance/aws/nist_800_53_revision_5_aws.json
Normal file
1214
prowler/compliance/aws/nist_csf_1.1_aws.json
Normal file
293
prowler/compliance/aws/pci_3.2.1_aws.json
Normal file
@@ -0,0 +1,293 @@
|
||||
{
|
||||
"Framework": "PCI",
|
||||
"Version": "3.2.1",
|
||||
"Provider": "AWS",
|
||||
"Description": "The Payment Card Industry Data Security Standard (PCI DSS) is a proprietary information security standard. It's administered by the PCI Security Standards Council, which was founded by American Express, Discover Financial Services, JCB International, MasterCard Worldwide, and Visa Inc. PCI DSS applies to entities that store, process, or transmit cardholder data (CHD) or sensitive authentication data (SAD). This includes, but isn't limited to, merchants, processors, acquirers, issuers, and service providers. The PCI DSS is mandated by the card brands and administered by the Payment Card Industry Security Standards Council.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "autoscaling",
|
||||
"Name": "Auto Scaling",
|
||||
"Description": "This control checks whether your Auto Scaling groups that are associated with a load balancer are using Elastic Load Balancing health checks. PCI DSS does not require load balancing or highly available configurations. However, this check aligns with AWS best practices.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "autoscaling",
|
||||
"Service": "autoscaling"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cloudtrail",
|
||||
"Name": "CloudTrail",
|
||||
"Description": "This section contains recommendations for configuring CloudTrail resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cloudtrail",
|
||||
"Service": "cloudtrail"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "codebuild",
|
||||
"Name": "CodeBuild",
|
||||
"Description": "This section contains recommendations for configuring CodeBuild resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "codebuild",
|
||||
"Service": "codebuild"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "config",
|
||||
"Name": "Config",
|
||||
"Description": "This section contains recommendations for configuring AWS Config.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "config",
|
||||
"Service": "config"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cw",
|
||||
"Name": "CloudWatch",
|
||||
"Description": "This section contains recommendations for configuring CloudWatch resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cw",
|
||||
"Service": "cloudwatch"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudwatch_log_metric_filter_root_usage"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "dms",
|
||||
"Name": "DMS",
|
||||
"Description": "This section contains recommendations for configuring AWS DMS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "dms",
|
||||
"Service": "dms"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "ec2",
|
||||
"Name": "EC2",
|
||||
"Description": "This section contains recommendations for configuring EC2 resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ec2",
|
||||
"Service": "ec2"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_elastic_ip_unassgined",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "elbv2",
|
||||
"Name": "ELBV2",
|
||||
"Description": "This section contains recommendations for configuring Elastic Load Balancer resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "elbv2",
|
||||
"Service": "elbv2"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "elasticsearch",
|
||||
"Name": "Elasticsearch",
|
||||
"Description": "This section contains recommendations for configuring Elasticsearch resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "elasticsearch",
|
||||
"Service": "elasticsearch"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"opensearch_service_domains_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "guardduty",
|
||||
"Name": "GuardDuty",
|
||||
"Description": "This section contains recommendations for configuring AWS GuardDuty resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "guardduty",
|
||||
"Service": "guardduty"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "iam",
|
||||
"Name": "IAM",
|
||||
"Description": "This section contains recommendations for configuring AWS IAM resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "iam",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_no_root_access_key",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_disable_90_days_credentials",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_uppercase"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "kms",
|
||||
"Name": "KMS",
|
||||
"Description": "This section contains recommendations for configuring AWS KMS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "kms",
|
||||
"Service": "kms"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"kms_cmk_rotation_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "lambda",
|
||||
"Name": "Lambda",
|
||||
"Description": "This section contains recommendations for configuring Lambda resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "lambda",
|
||||
"Service": "lambda"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"awslambda_function_url_public",
|
||||
"awslambda_function_not_publicly_accessible"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "opensearch",
|
||||
"Name": "OpenSearch",
|
||||
"Description": "This section contains recommendations for configuring OpenSearch resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "opensearch",
|
||||
"Service": "opensearch"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"opensearch_service_domains_encryption_at_rest_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "rds",
|
||||
"Name": "RDS",
|
||||
"Description": "This section contains recommendations for configuring AWS RDS resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "rds",
|
||||
"Service": "rds"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_snapshots_public_access",
|
||||
"rds_instance_no_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "redshift",
|
||||
"Name": "Redshift",
|
||||
"Description": "This section contains recommendations for configuring AWS Redshift resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "redshift",
|
||||
"Service": "redshift"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"redshift_cluster_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "s3",
|
||||
"Name": "S3",
|
||||
"Description": "This section contains recommendations for configuring AWS S3 resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "s3",
|
||||
"Service": "s3"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "sagemaker",
|
||||
"Name": "SageMaker",
|
||||
"Description": "This section contains recommendations for configuring AWS Sagemaker resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "sagemaker",
|
||||
"Service": "sagemaker"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "ssm",
|
||||
"Name": "SSM",
|
||||
"Description": "This section contains recommendations for configuring AWS SSM resources and options.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "ssm",
|
||||
"Service": "ssm"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ec2_instance_managed_by_ssm"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
198
prowler/compliance/aws/rbi_cyber_security_framework_aws.json
Normal file
@@ -0,0 +1,198 @@
|
||||
{
|
||||
"Framework": "RBI-Cyber-Security-Framework",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "The Reserve Bank had prescribed a set of baseline cyber security controls for primary (Urban) cooperative banks (UCBs) in October 2018. On further examination, it has been decided to prescribe a comprehensive cyber security framework for the UCBs, as a graded approach, based on their digital depth and interconnectedness with the payment systems landscape, digital products offered by them and assessment of cyber security risk. The framework would mandate implementation of progressively stronger security measures based on the nature, variety and scale of digital product offerings of banks.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "annex_i_1_1",
|
||||
"Name": "Annex I (1.1)",
|
||||
"Description": "UCBs should maintain an up-to-date business IT Asset Inventory Register containing the following fields, as a minimum: a) Details of the IT Asset (viz., hardware/software/network devices, key personnel, services, etc.), b. Details of systems where customer data are stored, c. Associated business applications, if any, d. Criticality of the IT asset (For example, High/Medium/Low).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_1_1",
|
||||
"Service": "ec2"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_1_3",
|
||||
"Name": "Annex I (1.3)",
|
||||
"Description": "Appropriately manage and provide protection within and outside UCB/network, keeping in mind how the data/information is stored, transmitted, processed, accessed and put to use within/outside the UCB’s network, and level of risk they are exposed to depending on the sensitivity of the data/information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_1_3",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check",
|
||||
"apigateway_client_certificate_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_instance_public_ip",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"elbv2_insecure_ssl_ciphers",
|
||||
"elb_ssl_listeners",
|
||||
"emr_cluster_master_nodes_no_public_ip",
|
||||
"opensearch_service_domains_encryption_at_rest_enabled",
|
||||
"opensearch_service_domains_node_to_node_encryption_enabled",
|
||||
"kms_cmk_rotation_enabled",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
"awslambda_function_url_public",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_snapshots_public_access",
|
||||
"redshift_cluster_audit_logging",
|
||||
"redshift_cluster_public_access",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_public_access",
|
||||
"sagemaker_notebook_instance_without_direct_internet_access_configured",
|
||||
"sagemaker_notebook_instance_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_5_1",
|
||||
"Name": "Annex I (5.1)",
|
||||
"Description": "The firewall configurations should be set to the highest security level and evaluation of critical device (such as firewall, network switches, security devices, etc.) configurations should be done periodically.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_5_1",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_waf_acl_attached",
|
||||
"elbv2_waf_acl_attached",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_networkacl_allow_ingress_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_networkacl_allow_ingress_any_port"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_6",
|
||||
"Name": "Annex I (6)",
|
||||
"Description": "Put in place systems and processes to identify, track, manage and monitor the status of patches to servers, operating system and application software running at the systems used by the UCB officials (end-users). Implement and update antivirus protection for all servers and applicable end points preferably through a centralised system.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_6",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_no_high_severity_findings",
|
||||
"rds_instance_minor_version_upgrade_enabled",
|
||||
"redshift_cluster_automatic_upgrades",
|
||||
"ssm_managed_compliant_patching",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_7_1",
|
||||
"Name": "Annex I (7.1)",
|
||||
"Description": "Disallow administrative rights on end-user workstations/PCs/laptops and provide access rights on a ‘need to know’ and ‘need to do’ basis.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_7_1",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_no_root_access_key"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_7_2",
|
||||
"Name": "Annex I (7.2)",
|
||||
"Description": "Passwords should be set as complex and lengthy and users should not use same passwords for all the applications/systems/devices.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_7_2",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_password_policy_reuse_24"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_7_3",
|
||||
"Name": "Annex I (7.3)",
|
||||
"Description": "Remote Desktop Protocol (RDP) which allows others to access the computer remotely over a network or over the internet should be always disabled and should be enabled only with the approval of the authorised officer of the UCB. Logs for such remote access shall be enabled and monitored for suspicious activities.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_7_3",
|
||||
"Service": "vpc"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_7_4",
|
||||
"Name": "Annex I (7.4)",
|
||||
"Description": "Implement appropriate (e.g. centralised) systems and controls to allow, manage, log and monitor privileged/super user/administrative access to critical systems (servers/databases, applications, network devices etc.)",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_7_4",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"opensearch_service_domains_cloudwatch_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"redshift_cluster_audit_logging",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"securityhub_enabled",
|
||||
"vpc_flow_logs_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "annex_i_12",
|
||||
"Name": "Annex I (12)",
|
||||
"Description": "Take periodic back up of the important data and store this data ‘off line’ (i.e., transferring important files to a storage device that can be detached from a computer/system after copying all the files).",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "annex_i_12",
|
||||
"Service": "aws"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
914
prowler/compliance/aws/soc2_aws.json
Normal file
@@ -0,0 +1,914 @@
|
||||
{
|
||||
"Framework": "SOC2",
|
||||
"Version": "",
|
||||
"Provider": "AWS",
|
||||
"Description": "System and Organization Controls (SOC), defined by the American Institute of Certified Public Accountants (AICPA), is the name of a set of reports that's produced during an audit. It's intended for use by service organizations (organizations that provide information systems as a service to other organizations) to issue validated reports of internal controls over those information systems to the users of those services. The reports focus on controls grouped into five categories known as Trust Service Principles.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "cc_1_1",
|
||||
"Name": "CC1.1 COSO Principle 1: The entity demonstrates a commitment to integrity and ethical values",
|
||||
"Description": "Sets the Tone at the Top - The board of directors and management, at all levels, demonstrate through their directives, actions, and behavior the importance of integrity and ethical values to support the functioning of the system of internal control.Establishes Standards of Conduct - The expectations of the board of directors and senior management concerning integrity and ethical values are defined in the entity’s standards of conduct and understood at all levels of the entity and by outsourced service providers and business partners. Evaluates Adherence to Standards of Conduct - Processes are in place to evaluate the performance of individuals and teams against the entity’s expected standards of conduct. Addresses Deviations in a Timely Manner - Deviations from the entity’s expected standards of conduct are identified and remedied in a timely and consistent manner.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_1_1",
|
||||
"Section": "CC1.0 - Common Criteria Related to Control Environment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_1_2",
|
||||
"Name": "CC1.2 COSO Principle 2: The board of directors demonstrates independence from management and exercises oversight of the development and performance of internal control",
|
||||
"Description": "Establishes Oversight Responsibilities - The board of directors identifies and accepts its oversight responsibilities in relation to established requirements and expectations. Applies Relevant Expertise - The board of directors defines, maintains, and periodically evaluates the skills and expertise needed among its members to enable them to ask probing questions of senior management and take commensurate action. Operates Independently - The board of directors has sufficient members who are independent from management and objective in evaluations and decision making. Additional point of focus specifically related to all engagements using the trust services criteria: Supplements Board Expertise - The board of directors supplements its expertise relevant to security, availability, processing integrity, confidentiality, and privacy, as needed, through the use of a subcommittee or consultants.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_1_2",
|
||||
"Section": "CC1.0 - Common Criteria Related to Control Environment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_1_3",
|
||||
"Name": "CC1.3 COSO Principle 3: Management establishes, with board oversight, structures, reporting lines, and appropriate authorities and responsibilities in the pursuit of objectives",
|
||||
"Description": "Considers All Structures of the Entity - Management and the board of directors consider the multiple structures used (including operating units, legal entities, geographic distribution, and outsourced service providers) to support the achievement of objectives. Establishes Reporting Lines - Management designs and evaluates lines of reporting for each entity structure to enable execution of authorities and responsibilities and flow of information to manage the activities of the entity. Defines, Assigns, and Limits Authorities and Responsibilities - Management and the board of directors delegate authority, define responsibilities, and use appropriate processes and technology to assign responsibility and segregate duties as necessary at the various levels of the organization. Additional points of focus specifically related to all engagements using the trust services criteria: Addresses Specific Requirements When Defining Authorities and Responsibilities—Management and the board of directors consider requirements relevant to security, availability, processing integrity, confidentiality, and privacy when defining authorities and responsibilities. Considers Interactions With External Parties When Establishing Structures, Reporting Lines, Authorities, and Responsibilities — Management and the board of directors consider the need for the entity to interact with and monitor the activities of external parties when establishing structures, reporting lines, authorities, and responsibilities.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_1_3",
|
||||
"Section": "CC1.0 - Common Criteria Related to Control Environment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"iam_policy_no_administrative_privileges",
|
||||
"iam_disable_90_days_credentials"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_1_4",
|
||||
"Name": "CC1.4 COSO Principle 4: The entity demonstrates a commitment to attract, develop, and retain competent individuals in alignment with objectives",
|
||||
"Description": "Establishes Policies and Practices - Policies and practices reflect expectations of competence necessary to support the achievement of objectives. Evaluates Competence and Addresses Shortcomings - The board of directors and management evaluate competence across the entity and in outsourced service providers in relation to established policies and practices and act as necessary to address shortcomings.Attracts, Develops, and Retains Individuals - The entity provides the mentoring and training needed to attract, develop, and retain sufficient and competent personnel and outsourced service providers to support the achievement of objectives.Plans and Prepares for Succession - Senior management and the board of directors develop contingency plans for assignments of responsibility important for internal control.Additional point of focus specifically related to all engagements using the trust services criteria:Considers the Background of Individuals - The entity considers the background of potential and existing personnel, contractors, and vendor employees when determining whether to employ and retain the individuals.Considers the Technical Competency of Individuals - The entity considers the technical competency of potential and existing personnel, contractors, and vendor employees when determining whether to employ and retain the individuals.Provides Training to Maintain Technical Competencies - The entity provides training programs, including continuing education and training, to ensure skill sets and technical competency of existing personnel, contractors, and vendor employees are developed and maintained.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_1_4",
|
||||
"Section": "CC1.0 - Common Criteria Related to Control Environment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_1_5",
|
||||
"Name": "CC1.5 COSO Principle 5: The entity holds individuals accountable for their internal control responsibilities in the pursuit of objectives",
|
||||
"Description": "Enforces Accountability Through Structures, Authorities, and Responsibilities - Management and the board of directors establish the mechanisms to communicate and hold individuals accountable for performance of internal control responsibilities across the entity and implement corrective action as necessary. Establishes Performance Measures, Incentives, and Rewards - Management and the board of directors establish performance measures, incentives, and other rewards appropriate for responsibilities at all levels of the entity, reflecting appropriate dimensions of performance and expected standards of conduct, and considering the achievement of both short-term and longer-term objectives.Evaluates Performance Measures, Incentives, and Rewards for Ongoing Relevance - Management and the board of directors align incentives and rewards with the fulfillment of internal control responsibilities in the achievement of objectives.Considers Excessive Pressures - Management and the board of directors evaluate and adjust pressures associated with the achievement of objectives as they assign responsibilities, develop performance measures, and evaluate performance. Evaluates Performance and Rewards or Disciplines Individuals - Management and the board of directors evaluate performance of internal control responsibilities, including adherence to standards of conduct and expected levels of competence, and provide rewards or exercise disciplinary action, as appropriate.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_1_5",
|
||||
"Section": "CC1.0 - Common Criteria Related to Control Environment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_2_1",
|
||||
"Name": "CC2.1 COSO Principle 13: The entity obtains or generates and uses relevant, quality information to support the functioning of internal control",
|
||||
"Description": "Identifies Information Requirements - A process is in place to identify the information required and expected to support the functioning of the other components of internal control and the achievement of the entity’s objectives. Captures Internal and External Sources of Data - Information systems capture internal and external sources of data. Processes Relevant Data Into Information - Information systems process and transform relevant data into information. Maintains Quality Throughout Processing - Information systems produce information that is timely, current, accurate, complete, accessible, protected, verifiable, and retained. Information is reviewed to assess its relevance in supporting the internal control components.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_2_1",
|
||||
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_2_2",
|
||||
"Name": "CC2.2 COSO Principle 14: The entity internally communicates information, including objectives and responsibilities for internal control, necessary to support the functioning of internal control",
|
||||
"Description": "Communicates Internal Control Information - A process is in place to communicate required information to enable all personnel to understand and carry out their internal control responsibilities. Communicates With the Board of Directors - Communication exists between management and the board of directors so that both have information needed to fulfill their roles with respect to the entity’s objectives. Provides Separate Communication Lines - Separate communication channels, such as whistle-blower hotlines, are in place and serve as fail-safe mechanisms to enable anonymous or confidential communication when normal channels are inoperative or ineffective. Selects Relevant Method of Communication - The method of communication considers the timing, audience, and nature of the information. Additional point of focus specifically related to all engagements using the trust services criteria: Communicates Responsibilities - Entity personnel with responsibility for designing, developing, implementing,operating, maintaining, or monitoring system controls receive communications about their responsibilities, including changes in their responsibilities, and have the information necessary to carry out those responsibilities. Communicates Information on Reporting Failures, Incidents, Concerns, and Other Matters—Entity personnel are provided with information on how to report systems failures, incidents, concerns, and other complaints to personnel. Communicates Objectives and Changes to Objectives - The entity communicates its objectives and changes to those objectives to personnel in a timely manner. Communicates Information to Improve Security Knowledge and Awareness - The entity communicates information to improve security knowledge and awareness and to model appropriate security behaviors to personnel through a security awareness training program. 
Additional points of focus that apply only when an engagement using the trust services criteria is performed at the system level: Communicates Information About System Operation and Boundaries - The entity prepares and communicates information about the design and operation of the system and its boundaries to authorized personnel to enable them to understand their role in the system and the results of system operation. Communicates System Objectives - The entity communicates its objectives to personnel to enable them to carry out their responsibilities. Communicates System Changes - System changes that affect responsibilities or the achievement of the entity's objectives are communicated in a timely manner.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_2_2",
|
||||
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_2_3",
|
||||
"Name": "CC2.3 COSO Principle 15: The entity communicates with external parties regarding matters affecting the functioning of internal control",
|
||||
"Description": "Communicates to External Parties - Processes are in place to communicate relevant and timely information to external parties, including shareholders, partners, owners, regulators, customers, financial analysts, and other external parties. Enables Inbound Communications - Open communication channels allow input from customers, consumers, suppliers, external auditors, regulators, financial analysts, and others, providing management and the board of directors with relevant information. Communicates With the Board of Directors - Relevant information resulting from assessments conducted by external parties is communicated to the board of directors. Provides Separate Communication Lines - Separate communication channels, such as whistle-blower hotlines, are in place and serve as fail-safe mechanisms to enable anonymous or confidential communication when normal channels are inoperative or ineffective. Selects Relevant Method of Communication - The method of communication considers the timing, audience, and nature of the communication and legal, regulatory, and fiduciary requirements and expectations. Communicates Objectives Related to Confidentiality and Changes to Objectives - The entity communicates, to external users, vendors, business partners and others whose products and services are part of the system, objectives and changes to objectives related to confidentiality. Additional point of focus that applies only to an engagement using the trust services criteria for privacy: Communicates Objectives Related to Privacy and Changes to Objectives - The entity communicates, to external users, vendors, business partners and others whose products and services are part of the system, objectives related to privacy and changes to those objectives. 
Additional points of focus that apply only when an engagement using the trust services criteria is performed at the system level: Communicates Information About System Operation and Boundaries - The entity prepares and communicates information about the design and operation of the system and its boundaries to authorized external users to permit users to understand their role in the system and the results of system operation. Communicates System Objectives - The entity communicates its system objectives to appropriate external users. Communicates System Responsibilities - External users with responsibility for designing, developing, implementing, operating, maintaining, and monitoring system controls receive communications about their responsibilities and have the information necessary to carry out those responsibilities. Communicates Information on Reporting System Failures, Incidents, Concerns, and Other Matters - External users are provided with information on how to report systems failures, incidents, concerns, and other complaints to appropriate personnel.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_2_3",
|
||||
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_3_1",
|
||||
"Name": "CC3.1 COSO Principle 6: The entity specifies objectives with sufficient clarity to enable the identification and assessment of risks relating to objectives",
|
||||
"Description": "Operations Objectives:Reflects Management's Choices - Operations objectives reflect management's choices about structure, industry considerations, and performance of the entity.Considers Tolerances for Risk - Management considers the acceptable levels of variation relative to the achievement of operations objectives.External Financial Reporting Objectives:Complies With Applicable Accounting Standards - Financial reporting objectives are consistent with accounting principles suitable and available for that entity. The accounting principles selected are appropriate in the circumstances.External Nonfinancial Reporting Objectives:Complies With Externally Established Frameworks - Management establishes objectives consistent with laws and regulations or standards and frameworks of recognized external organizations.Reflects Entity Activities - External reporting reflects the underlying transactions and events within a range of acceptable limits.Considers the Required Level of Precision—Management reflects the required level of precision and accuracy suitable for user needs and based on criteria established by third parties in nonfinancial reporting.Internal Reporting Objectives:Reflects Management's Choices - Internal reporting provides management with accurate and complete information regarding management's choices and information needed in managing the entity.Considers the Required Level of Precision—Management reflects the required level of precision and accuracy suitable for user needs in nonfinancial reporting objectives and materiality within financial reporting objectives.Reflects Entity Activities—Internal reporting reflects the underlying transactions and events within a range of acceptable limits.Compliance Objectives:Reflects External Laws and Regulations - Laws and regulations establish minimum standards of conduct, which the entity integrates into compliance objectives.Considers Tolerances for Risk - Management considers the acceptable levels 
of variation relative to the achievement of operations objectives.Additional point of focus specifically related to all engagements using the trust services criteria: Establishes Sub-objectives to Support Objectives—Management identifies sub-objectives related to security, availability, processing integrity, confidentiality, and privacy to support the achievement of the entity’s objectives related to reporting, operations, and compliance.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_3_1",
|
||||
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled",
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_3_2",
|
||||
"Name": "CC3.2 COSO Principle 7: The entity identifies risks to the achievement of its objectives across the entity and analyzes risks as a basis for determining how the risks should be managed",
|
||||
"Description": "Includes Entity, Subsidiary, Division, Operating Unit, and Functional Levels - The entity identifies and assesses risk at the entity, subsidiary, division, operating unit, and functional levels relevant to the achievement of objectives.Analyzes Internal and External Factors - Risk identification considers both internal and external factors and their impact on the achievement of objectives.Involves Appropriate Levels of Management - The entity puts into place effective risk assessment mechanisms that involve appropriate levels of management.Estimates Significance of Risks Identified - Identified risks are analyzed through a process that includes estimating the potential significance of the risk.Determines How to Respond to Risks - Risk assessment includes considering how the risk should be managed and whether to accept, avoid, reduce, or share the risk.Additional points of focus specifically related to all engagements using the trust services criteria:Identifies and Assesses Criticality of Information Assets and Identifies Threats and Vulnerabilities - The entity's risk identification and assessment process includes (1) identifying information assets, including physical devices and systems, virtual devices, software, data and data flows, external information systems, and organizational roles; (2) assessing the criticality of those information assets; (3) identifying the threats to the assets from intentional (including malicious) and unintentional acts and environmental events; and (4) identifying the vulnerabilities of the identified assets.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_3_2",
|
||||
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"guardduty_is_enabled",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_3_3",
|
||||
"Name": "CC3.3 COSO Principle 8: The entity considers the potential for fraud in assessing risks to the achievement of objectives",
|
||||
"Description": "Considers Various Types of Fraud - The assessment of fraud considers fraudulent reporting, possible loss of assets, and corruption resulting from the various ways that fraud and misconduct can occur.Assesses Incentives and Pressures - The assessment of fraud risks considers incentives and pressures.Assesses Opportunities - The assessment of fraud risk considers opportunities for unauthorized acquisition,use, or disposal of assets, altering the entity’s reporting records, or committing other inappropriate acts.Assesses Attitudes and Rationalizations - The assessment of fraud risk considers how management and other personnel might engage in or justify inappropriate actions.Additional point of focus specifically related to all engagements using the trust services criteria: Considers the Risks Related to the Use of IT and Access to Information - The assessment of fraud risks includes consideration of threats and vulnerabilities that arise specifically from the use of IT and access to information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_3_3",
|
||||
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_3_4",
|
||||
"Name": "CC3.4 COSO Principle 9: The entity identifies and assesses changes that could significantly impact the system of internal control",
|
||||
"Description": "Assesses Changes in the External Environment - The risk identification process considers changes to the regulatory, economic, and physical environment in which the entity operates.Assesses Changes in the Business Model - The entity considers the potential impacts of new business lines, dramatically altered compositions of existing business lines, acquired or divested business operations on the system of internal control, rapid growth, changing reliance on foreign geographies, and new technologies.Assesses Changes in Leadership - The entity considers changes in management and respective attitudes and philosophies on the system of internal control.Assess Changes in Systems and Technology - The risk identification process considers changes arising from changes in the entity’s systems and changes in the technology environment.Assess Changes in Vendor and Business Partner Relationships - The risk identification process considers changes in vendor and business partner relationships.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_3_4",
|
||||
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
|
||||
"Service": "config",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_4_1",
|
||||
"Name": "CC4.1 COSO Principle 16: The entity selects, develops, and performs ongoing and/or separate evaluations to ascertain whether the components of internal control are present and functioning",
|
||||
"Description": "Considers a Mix of Ongoing and Separate Evaluations - Management includes a balance of ongoing and separate evaluations.Considers Rate of Change - Management considers the rate of change in business and business processes when selecting and developing ongoing and separate evaluations.Establishes Baseline Understanding - The design and current state of an internal control system are used to establish a baseline for ongoing and separate evaluations.Uses Knowledgeable Personnel - Evaluators performing ongoing and separate evaluations have sufficient knowledge to understand what is being evaluated.Integrates With Business Processes - Ongoing evaluations are built into the business processes and adjust to changing conditions.Adjusts Scope and Frequency—Management varies the scope and frequency of separate evaluations depending on risk.Objectively Evaluates - Separate evaluations are performed periodically to provide objective feedback.Considers Different Types of Ongoing and Separate Evaluations - Management uses a variety of different types of ongoing and separate evaluations, including penetration testing, independent certification made against established specifications (for example, ISO certifications), and internal audit assessments.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_4_1",
|
||||
"Section": "CC4.0 - Monitoring Activities",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_4_2",
|
||||
"Name": "CC4.2 COSO Principle 17: The entity evaluates and communicates internal control deficiencies in a timely manner to those parties responsible for taking corrective action, including senior management and the board of directors, as appropriate",
|
||||
"Description": "Assesses Results - Management and the board of directors, as appropriate, assess results of ongoing and separate evaluations.Communicates Deficiencies - Deficiencies are communicated to parties responsible for taking corrective action and to senior management and the board of directors, as appropriate.Monitors Corrective Action - Management tracks whether deficiencies are remedied on a timely basis.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_4_2",
|
||||
"Section": "CC4.0 - Monitoring Activities",
|
||||
"Service": "guardduty",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_no_high_severity_findings"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_5_1",
|
||||
"Name": "CC5.1 COSO Principle 10: The entity selects and develops control activities that contribute to the mitigation of risks to the achievement of objectives to acceptable levels",
|
||||
"Description": "Integrates With Risk Assessment - Control activities help ensure that risk responses that address and mitigate risks are carried out.Considers Entity-Specific Factors - Management considers how the environment, complexity, nature, and scope of its operations, as well as the specific characteristics of its organization, affect the selection and development of control activities.Determines Relevant Business Processes - Management determines which relevant business processes require control activities.Evaluates a Mix of Control Activity Types - Control activities include a range and variety of controls and may include a balance of approaches to mitigate risks, considering both manual and automated controls, and preventive and detective controls.Considers at What Level Activities Are Applied - Management considers control activities at various levels in the entity.Addresses Segregation of Duties - Management segregates incompatible duties, and where such segregation is not practical, management selects and develops alternative control activities.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_5_1",
|
||||
"Section": "CC5.0 - Control Activities",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_5_2",
|
||||
"Name": "CC5.2 COSO Principle 11: The entity also selects and develops general control activities over technology to support the achievement of objectives",
|
||||
"Description": "Determines Dependency Between the Use of Technology in Business Processes and Technology General Controls - Management understands and determines the dependency and linkage between business processes, automated control activities, and technology general controls.Establishes Relevant Technology Infrastructure Control Activities - Management selects and develops control activities over the technology infrastructure, which are designed and implemented to help ensure the completeness, accuracy, and availability of technology processing.Establishes Relevant Security Management Process Controls Activities - Management selects and develops control activities that are designed and implemented to restrict technology access rights to authorized users commensurate with their job responsibilities and to protect the entity’s assets from external threats.Establishes Relevant Technology Acquisition, Development, and Maintenance Process Control Activities - Management selects and develops control activities over the acquisition, development, and maintenance of technology and its infrastructure to achieve management’s objectives.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_5_2",
|
||||
"Section": "CC5.0 - Control Activities",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_5_3",
|
||||
"Name": "CC5.3 COSO Principle 12: The entity deploys control activities through policies that establish what is expected and in procedures that put policies into action",
|
||||
"Description": "Establishes Policies and Procedures to Support Deployment of Management ‘s Directives - Management establishes control activities that are built into business processes and employees’ day-to-day activities through policies establishing what is expected and relevant procedures specifying actions.Establishes Responsibility and Accountability for Executing Policies and Procedures - Management establishes responsibility and accountability for control activities with management (or other designated personnel) of the business unit or function in which the relevant risks reside.Performs in a Timely Manner - Responsible personnel perform control activities in a timely manner as defined by the policies and procedures.Takes Corrective Action - Responsible personnel investigate and act on matters identified as a result of executing control activities.Performs Using Competent Personnel - Competent personnel with sufficient authority perform control activities with diligence and continuing focus.Reassesses Policies and Procedures - Management periodically reviews control activities to determine their continued relevance and refreshes them when necessary.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_5_3",
|
||||
"Section": "CC5.0 - Control Activities",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_1",
|
||||
"Name": "CC6.1 The entity implements logical access security software, infrastructure, and architectures over protected information assets to protect them from security events to meet the entity's objectives",
|
||||
"Description": "Identifies and Manages the Inventory of Information Assets - The entity identifies, inventories, classifies, and manages information assets.Restricts Logical Access - Logical access to information assets, including hardware, data (at-rest, during processing, or in transmission), software, administrative authorities, mobile devices, output, and offline system components is restricted through the use of access control software and rule sets.Identifies and Authenticates Users - Persons, infrastructure and software are identified and authenticated prior to accessing information assets, whether locally or remotely.Considers Network Segmentation - Network segmentation permits unrelated portions of the entity's information system to be isolated from each other.Manages Points of Access - Points of access by outside entities and the types of data that flow through the points of access are identified, inventoried, and managed. The types of individuals and systems using each point of access are identified, documented, and managed.Restricts Access to Information Assets - Combinations of data classification, separate data structures, port restrictions, access protocol restrictions, user identification, and digital certificates are used to establish access control rules for information assets.Manages Identification and Authentication - Identification and authentication requirements are established, documented, and managed for individuals and systems accessing entity information, infrastructure and software.Manages Credentials for Infrastructure and Software - New internal and external infrastructure and software are registered, authorized, and documented prior to being granted access credentials and implemented on the network or access point. 
Credentials are removed and access is disabled when access is no longer required or the infrastructure and software are no longer in use.Uses Encryption to Protect Data - The entity uses encryption to supplement other measures used to protect data-at-rest, when such protections are deemed appropriate based on assessed risk.Protects Encryption Keys - Processes are in place to protect encryption keys during generation, storage, use, and destruction.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_1",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "s3",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_2",
|
||||
"Name": "CC6.2 Prior to issuing system credentials and granting system access, the entity registers and authorizes new internal and external users whose access is administered by the entity",
|
||||
"Description": "Prior to issuing system credentials and granting system access, the entity registers and authorizes new internal and external users whose access is administered by the entity. For those users whose access is administered by the entity, user system credentials are removed when user access is no longer authorized.Controls Access Credentials to Protected Assets - Information asset access credentials are created based on an authorization from the system's asset owner or authorized custodian.Removes Access to Protected Assets When Appropriate - Processes are in place to remove credential access when an individual no longer requires such access.Reviews Appropriateness of Access Credentials - The appropriateness of access credentials is reviewed on a periodic basis for unnecessary and inappropriate individuals with credentials.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_2",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "rds",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_no_public_access"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_3",
|
||||
"Name": "CC6.3 The entity authorizes, modifies, or removes access to data, software, functions, and other protected information assets based on roles, responsibilities, or the system design and changes, giving consideration to the concepts of least privilege and segregation of duties, to meet the entity’s objectives",
|
||||
"Description": "Creates or Modifies Access to Protected Information Assets - Processes are in place to create or modify access to protected information assets based on authorization from the asset’s owner.Removes Access to Protected Information Assets - Processes are in place to remove access to protected information assets when an individual no longer requires access.Uses Role-Based Access Controls - Role-based access control is utilized to support segregation of incompatible functions.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_3",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "iam",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_policy_no_administrative_privileges"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_4",
|
||||
"Name": "CC6.4 The entity restricts physical access to facilities and protected information assets to authorized personnel to meet the entity’s objectives",
|
||||
"Description": "Creates or Modifies Physical Access - Processes are in place to create or modify physical access to facilities such as data centers, office spaces, and work areas, based on authorization from the system's asset owner.Removes Physical Access - Processes are in place to remove access to physical resources when an individual no longer requires access.Reviews Physical Access - Processes are in place to periodically review physical access to ensure consistency with job responsibilities.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_4",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_5",
|
||||
"Name": "CC6.5 The entity discontinues logical and physical protections over physical assets only after the ability to read or recover data and software from those assets has been diminished and is no longer required to meet the entity’s objectives",
|
||||
"Description": "Identifies Data and Software for Disposal - Procedures are in place to identify data and software stored on equipment to be disposed and to render such data and software unreadable.Removes Data and Software From Entity Control - Procedures are in place to remove data and software stored on equipment to be removed from the physical control of the entity and to render such data and software unreadable.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_5",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_6",
|
||||
"Name": "CC6.6 The entity implements logical access security measures to protect against threats from sources outside its system boundaries",
|
||||
"Description": "Restricts Access — The types of activities that can occur through a communication channel (for example, FTP site, router port) are restricted.Protects Identification and Authentication Credentials — Identification and authentication credentials are protected during transmission outside its system boundaries.Requires Additional Authentication or Credentials — Additional authentication information or credentials are required when accessing the system from outside its boundaries.Implements Boundary Protection Systems — Boundary protection systems (for example, firewalls, demilitarized zones, and intrusion detection systems) are implemented to protect external access points from attempts and unauthorized access and are monitored to detect such attempts.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_6",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "ec2",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_instance_public_ip"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_7",
|
||||
"Name": "CC6.7 The entity restricts the transmission, movement, and removal of information to authorized internal and external users and processes, and protects it during transmission, movement, or removal to meet the entity’s objectives",
|
||||
"Description": "Restricts the Ability to Perform Transmission - Data loss prevention processes and technologies are used to restrict ability to authorize and execute transmission, movement and removal of information.Uses Encryption Technologies or Secure Communication Channels to Protect Data - Encryption technologies or secured communication channels are used to protect transmission of data and other communications beyond connectivity access points.Protects Removal Media - Encryption technologies and physical asset protections are used for removable media (such as USB drives and back-up tapes), as appropriate.Protects Mobile Devices - Processes are in place to protect mobile devices (such as laptops, smart phones and tablets) that serve as information assets.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_7",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "acm",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"acm_certificates_expiration_check"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_6_8",
|
||||
"Name": "CC6.8 The entity implements controls to prevent or detect and act upon the introduction of unauthorized or malicious software to meet the entity’s objectives",
|
||||
"Description": "Restricts Application and Software Installation - The ability to install applications and software is restricted to authorized individuals.Detects Unauthorized Changes to Software and Configuration Parameters - Processes are in place to detect changes to software and configuration parameters that may be indicative of unauthorized or malicious software.Uses a Defined Change Control Process - A management-defined change control process is used for the implementation of software.Uses Antivirus and Anti-Malware Software - Antivirus and anti-malware software is implemented and maintained to provide for the interception or detection and remediation of malware.Scans Information Assets from Outside the Entity for Malware and Other Unauthorized Software - Procedures are in place to scan information assets that have been transferred or returned to the entity’s custody for malware and other unauthorized software and to remove any items detected prior to its implementation on the network.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_6_8",
|
||||
"Section": "CC6.0 - Logical and Physical Access",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_7_1",
|
||||
"Name": "CC7.1 To meet its objectives, the entity uses detection and monitoring procedures to identify (1) changes to configurations that result in the introduction of new vulnerabilities, and (2) susceptibilities to newly discovered vulnerabilities",
|
||||
"Description": "Uses Defined Configuration Standards - Management has defined configuration standards.Monitors Infrastructure and Software - The entity monitors infrastructure and software for noncompliance with the standards, which could threaten the achievement of the entity's objectives.Implements Change-Detection Mechanisms - The IT system includes a change-detection mechanism (for example, file integrity monitoring tools) to alert personnel to unauthorized modifications of critical system files, configuration files, or content files.Detects Unknown or Unauthorized Components - Procedures are in place to detect the introduction of unknown or unauthorized components.Conducts Vulnerability Scans - The entity conducts vulnerability scans designed to identify potential vulnerabilities or misconfigurations on a periodic basis and after any significant change in the environment and takes action to remediate identified deficiencies on a timely basis.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_7_1",
|
||||
"Section": "CC7.0 - System Operations",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"guardduty_is_enabled",
|
||||
"securityhub_enabled",
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_7_2",
|
||||
"Name": "CC7.2 The entity monitors system components and the operation of those components for anomalies that are indicative of malicious acts, natural disasters, and errors affecting the entity's ability to meet its objectives; anomalies are analyzed to determine whether they represent security events",
|
||||
"Description": "Implements Detection Policies, Procedures, and Tools - Detection policies and procedures are defined and implemented, and detection tools are implemented on Infrastructure and software to identify anomalies in the operation or unusual activity on systems. Procedures may include (1) a defined governance process for security event detection and management that includes provision of resources; (2) use of intelligence sources to identify newly discovered threats and vulnerabilities; and (3) logging of unusual system activities.Designs Detection Measures - Detection measures are designed to identify anomalies that could result from actual or attempted (1) compromise of physical barriers; (2) unauthorized actions of authorized personnel; (3) use of compromised identification and authentication credentials; (4) unauthorized access from outside the system boundaries; (5) compromise of authorized external parties; and (6) implementation or connection of unauthorized hardware and software.Implements Filters to Analyze Anomalies - Management has implemented procedures to filter, summarize, and analyze anomalies to identify security events.Monitors Detection Tools for Effective Operation - Management has implemented processes to monitor the effectiveness of detection tools.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_7_2",
|
||||
"Section": "CC7.0 - System Operations",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"securityhub_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"redshift_cluster_audit_logging",
|
||||
"vpc_flow_logs_enabled",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"apigateway_logging_enabled",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_7_3",
|
||||
"Name": "CC7.3 The entity evaluates security events to determine whether they could or have resulted in a failure of the entity to meet its objectives (security incidents) and, if so, takes actions to prevent or address such failures",
|
||||
"Description": "Responds to Security Incidents - Procedures are in place for responding to security incidents and evaluating the effectiveness of those policies and procedures on a periodic basis.Communicates and Reviews Detected Security Events - Detected security events are communicated to and reviewed by the individuals responsible for the management of the security program and actions are taken, if necessary.Develops and Implements Procedures to Analyze Security Incidents - Procedures are in place to analyze security incidents and determine system impact.Assesses the Impact on Personal Information - Detected security events are evaluated to determine whether they could or did result in the unauthorized disclosure or use of personal information and whether there has been a failure to comply with applicable laws or regulations.Determines Personal Information Used or Disclosed - When an unauthorized use or disclosure of personal information has occurred, the affected information is identified.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_7_3",
|
||||
"Section": "CC7.0 - System Operations",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"apigateway_logging_enabled",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
"securityhub_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"s3_bucket_server_access_logging_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"vpc_flow_logs_enabled",
|
||||
"guardduty_no_high_severity_findings"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_7_4",
|
||||
"Name": "CC7.4 The entity responds to identified security incidents by executing a defined incident response program to understand, contain, remediate, and communicate security incidents, as appropriate",
|
||||
"Description": "Assigns Roles and Responsibilities - Roles and responsibilities for the design, implementation, maintenance, and execution of the incident response program are assigned, including the use of external resources when necessary.Contains Security Incidents - Procedures are in place to contain security incidents that actively threaten entity objectives.Mitigates Ongoing Security Incidents - Procedures are in place to mitigate the effects of ongoing security incidents.Ends Threats Posed by Security Incidents - Procedures are in place to end the threats posed by security incidents through closure of the vulnerability, removal of unauthorized access, and other remediation actions.Restores Operations - Procedures are in place to restore data and business operations to an interim state that permits the achievement of entity objectives. Develops and Implements Communication Protocols for Security Incidents - Protocols for communicating security incidents and actions taken to affected parties are developed and implemented to meet the entity's objectives.Obtains Understanding of Nature of Incident and Determines Containment Strategy - An understanding of the nature (for example, the method by which the incident occurred and the affected system resources) and severity of the security incident is obtained to determine the appropriate containment strategy, including (1) a determination of the appropriate response time frame, and (2) the determination and execution of the containment approach.Remediates Identified Vulnerabilities - Identified vulnerabilities are remediated through the development and execution of remediation activities.Communicates Remediation Activities - Remediation activities are documented and communicated in accordance with the incident response program.Evaluates the Effectiveness of Incident Response - The design of incident response activities is evaluated for effectiveness on a periodic basis.Periodically Evaluates Incidents - Periodically, 
management reviews incidents related to security, availability, processing integrity, confidentiality, and privacy and identifies the need for system changes based on incident patterns and root causes. Communicates Unauthorized Use and Disclosure - Events that resulted in unauthorized use or disclosure of personal information are communicated to the data subjects, legal and regulatory authorities, and others as required.Application of Sanctions - The conduct of individuals and organizations operating under the authority of the entity and involved in the unauthorized use or disclosure of personal information is evaluated and, if appropriate, sanctioned in accordance with entity policies and legal and regulatory requirements.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_7_4",
|
||||
"Section": "CC7.0 - System Operations",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"efs_have_backup_enabled",
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_instance_backup_enabled",
|
||||
"redshift_cluster_automated_snapshot",
|
||||
"s3_bucket_object_versioning",
|
||||
"securityhub_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_7_5",
|
||||
"Name": "CC7.5 The entity identifies, develops, and implements activities to recover from identified security incidents",
|
||||
"Description": "Restores the Affected Environment - The activities restore the affected environment to functional operation by rebuilding systems, updating software, installing patches, and changing configurations, as needed.Communicates Information About the Event - Communications about the nature of the incident, recovery actions taken, and activities required for the prevention of future security events are made to management and others as appropriate (internal and external).Determines Root Cause of the Event - The root cause of the event is determined.Implements Changes to Prevent and Detect Recurrences - Additional architecture or changes to preventive and detective controls, or both, are implemented to prevent and detect recurrences on a timely basis.Improves Response and Recovery Procedures - Lessons learned are analyzed, and the incident response plan and recovery procedures are improved.Implements Incident Recovery Plan Testing - Incident recovery plan testing is performed on a periodic basis. The testing includes (1) development of testing scenarios based on threat likelihood and magnitude; (2) consideration of relevant system components from across the entity that can impair availability; (3) scenarios that consider the potential for the lack of availability of key personnel; and (4) revision of continuity plans and systems based on test results.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_7_5",
|
||||
"Section": "CC7.0 - System Operations",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_8_1",
|
||||
"Name": "CC8.1 The entity authorizes, designs, develops or acquires, configures, documents, tests, approves, and implements changes to infrastructure, data, software, and procedures to meet its objectives",
|
||||
"Description": "Manages Changes Throughout the System Lifecycle - A process for managing system changes throughout the lifecycle of the system and its components (infrastructure, data, software and procedures) is used to support system availability and processing integrity.Authorizes Changes - A process is in place to authorize system changes prior to development.Designs and Develops Changes - A process is in place to design and develop system changes.Documents Changes - A process is in place to document system changes to support ongoing maintenance of the system and to support system users in performing their responsibilities.Tracks System Changes - A process is in place to track system changes prior to implementation.Configures Software - A process is in place to select and implement the configuration parameters used to control the functionality of software.Tests System Changes - A process is in place to test system changes prior to implementation.Approves System Changes - A process is in place to approve system changes prior to implementation.Deploys System Changes - A process is in place to implement system changes.Identifies and Evaluates System Changes - Objectives affected by system changes are identified, and the ability of the modified system to meet the objectives is evaluated throughout the system development life cycle.Identifies Changes in Infrastructure, Data, Software, and Procedures Required to Remediate Incidents - Changes in infrastructure, data, software, and procedures required to remediate incidents to continue to meet objectives are identified, and the change process is initiated upon identification.Creates Baseline Configuration of IT Technology - A baseline configuration of IT and control systems is created and maintained.Provides for Changes Necessary in Emergency Situations - A process is in place for authorizing, designing, testing, approving and implementing changes necessary in emergency situations (that is, changes that need to be 
implemented in an urgent timeframe).Protects Confidential Information - The entity protects confidential information during system design, development, testing, implementation, and change processes to meet the entity’s objectives related to confidentiality.Protects Personal Information - The entity protects personal information during system design, development, testing, implementation, and change processes to meet the entity’s objectives related to privacy.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_8_1",
|
||||
"Section": "CC8.0 - Change Management",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"config_recorder_all_regions_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_9_1",
|
||||
"Name": "CC9.1 The entity identifies, selects, and develops risk mitigation activities for risks arising from potential business disruptions",
|
||||
"Description": "Considers Mitigation of Risks of Business Disruption - Risk mitigation activities include the development of planned policies, procedures, communications, and alternative processing solutions to respond to, mitigate, and recover from security events that disrupt business operations. Those policies and procedures include monitoring processes and information and communications to meet the entity's objectives during response, mitigation, and recovery efforts.Considers the Use of Insurance to Mitigate Financial Impact Risks - The risk management activities consider the use of insurance to offset the financial impact of loss events that would otherwise impair the ability of the entity to meet its objectives.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_9_1",
|
||||
"Section": "CC9.0 - Risk Mitigation",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_9_2",
|
||||
"Name": "CC9.2 The entity assesses and manages risks associated with vendors and business partners",
|
||||
"Description": "Establishes Requirements for Vendor and Business Partner Engagements - The entity establishes specific requirements for a vendor and business partner engagement that includes (1) scope of services and product specifications, (2) roles and responsibilities, (3) compliance requirements, and (4) service levels.Assesses Vendor and Business Partner Risks - The entity assesses, on a periodic basis, the risks that vendors and business partners (and those entities’ vendors and business partners) represent to the achievement of the entity's objectives.Assigns Responsibility and Accountability for Managing Vendors and Business Partners - The entity assigns responsibility and accountability for the management of risks associated with vendors and business partners.Establishes Communication Protocols for Vendors and Business Partners - The entity establishes communication and resolution protocols for service or product issues related to vendors and business partners.Establishes Exception Handling Procedures From Vendors and Business Partners - The entity establishes exception handling procedures for service or product issues related to vendors and business partners.Assesses Vendor and Business Partner Performance - The entity periodically assesses the performance of vendors and business partners.Implements Procedures for Addressing Issues Identified During Vendor and Business Partner Assessments - The entity implements procedures for addressing issues identified with vendor and business partner relationships.Implements Procedures for Terminating Vendor and Business Partner Relationships - The entity implements procedures for terminating vendor and business partner relationships.Obtains Confidentiality Commitments from Vendors and Business Partners - The entity obtains confidentiality commitments that are consistent with the entity’s confidentiality commitments and requirements from vendors and business partners who have access to confidential 
information.Assesses Compliance With Confidentiality Commitments of Vendors and Business Partners - On a periodic and as-needed basis, the entity assesses compliance by vendors and business partners with the entity’s confidentiality commitments and requirements.Obtains Privacy Commitments from Vendors and Business Partners - The entity obtains privacy commitments, consistent with the entity’s privacy commitments and requirements, from vendors and business partners who have access to personal information.Assesses Compliance with Privacy Commitments of Vendors and Business Partners - On a periodic and as-needed basis, the entity assesses compliance by vendors and business partners with the entity’s privacy commitments and requirements and takes corrective action as necessary.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_9_2",
|
||||
"Section": "CC9.0 - Risk Mitigation",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_a_1_1",
|
||||
"Name": "A1.1 The entity maintains, monitors, and evaluates current processing capacity and use of system components (infrastructure, data, and software) to manage capacity demand and to enable the implementation of additional capacity to help meet its objectives",
|
||||
"Description": "Measures Current Usage - The use of the system components is measured to establish a baseline for capacity management and to use when evaluating the risk of impaired availability due to capacity constraints.Forecasts Capacity - The expected average and peak use of system components is forecasted and compared to system capacity and associated tolerances. Forecasting considers capacity in the event of the failure of system components that constrain capacity.Makes Changes Based on Forecasts - The system change management process is initiated when forecasted usage exceeds capacity tolerances.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_a_1_1",
|
||||
"Section": "CCA1.0 - Additional Criteria for Availability",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_a_1_2",
|
||||
"Name": "A1.2 The entity authorizes, designs, develops or acquires, implements, operates, approves, maintains, and monitors environmental protections, software, data back-up processes, and recovery infrastructure to meet its objectives",
|
||||
"Description": "Measures Current Usage - The use of the system components is measured to establish a baseline for capacity management and to use when evaluating the risk of impaired availability due to capacity constraints.Forecasts Capacity - The expected average and peak use of system components is forecasted and compared to system capacity and associated tolerances. Forecasting considers capacity in the event of the failure of system components that constrain capacity.Makes Changes Based on Forecasts - The system change management process is initiated when forecasted usage exceeds capacity tolerances.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_a_1_2",
|
||||
"Section": "CCA1.0 - Additional Criteria for Availability",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
"apigateway_logging_enabled",
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"dynamodb_tables_pitr_enabled",
"efs_have_backup_enabled",
"elbv2_logging_enabled",
"elb_logging_enabled",
"rds_instance_backup_enabled",
"rds_instance_integration_cloudwatch_logs",
"redshift_cluster_automated_snapshot",
"s3_bucket_object_versioning"
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_a_1_3",
|
||||
"Name": "A1.3 The entity tests recovery plan procedures supporting system recovery to meet its objectives",
|
||||
"Description": "Implements Business Continuity Plan Testing - Business continuity plan testing is performed on a periodic basis. The testing includes (1) development of testing scenarios based on threat likelihood and magnitude; (2) consideration of system components from across the entity that can impair the availability; (3) scenarios that consider the potential for the lack of availability of key personnel; and (4) revision of continuity plans and systems based on test results.Tests Integrity and Completeness of Back-Up Data - The integrity and completeness of back-up information is tested on a periodic basis.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_a_1_3",
|
||||
"Section": "CCA1.0 - Additional Criteria for Availability",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "cc_c_1_1",
|
||||
"Name": "C1.1 The entity identifies and maintains confidential information to meet the entity’s objectives related to confidentiality",
|
||||
"Description": "Identifies Confidential information - Procedures are in place to identify and designate confidential information when it is received or created and to determine the period over which the confidential information is to be retained.Protects Confidential Information from Destruction - Procedures are in place to protect confidential information from erasure or destruction during the specified retention period of the information",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_c_1_1",
|
||||
"Section": "CCC1.0 - Additional Criteria for Confidentiality",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"rds_instance_deletion_protection"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "cc_c_1_2",
|
||||
"Name": "C1.2 The entity disposes of confidential information to meet the entity’s objectives related to confidentiality",
|
||||
"Description": "Identifies Confidential Information for Destruction - Procedures are in place to identify confidential information requiring destruction when the end of the retention period is reached.Destroys Confidential Information - Procedures are in place to erase or otherwise destroy confidential information that has been identified for destruction.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "cc_c_1_2",
|
||||
"Section": "CCC1.0 - Additional Criteria for Confidentiality",
|
||||
"Service": "s3",
|
||||
"Soc_Type": "automated"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_object_versioning"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "p_1_1",
|
||||
"Name": "P1.1 The entity provides notice to data subjects about its privacy practices to meet the entity’s objectives related to privacy",
|
||||
"Description": "The entity provides notice to data subjects about its privacy practices to meet the entity’s objectives related to privacy. The notice is updated and communicated to data subjects in a timely manner for changes to the entity’s privacy practices, including changes in the use of personal information, to meet the entity’s objectives related to privacy.Communicates to Data Subjects - Notice is provided to data subjects regarding the following:Purpose for collecting personal informationChoice and consentTypes of personal information collectedMethods of collection (for example, use of cookies or other tracking techniques)Use, retention, and disposalAccessDisclosure to third partiesSecurity for privacyQuality, including data subjects’ responsibilities for qualityMonitoring and enforcementIf personal information is collected from sources other than the individual, such sources are described in the privacy notice.Provides Notice to Data Subjects - Notice is provided to data subjects (1) at or before the time personal information is collected or as soon as practical thereafter, (2) at or before the entity changes its privacy notice or as soon as practical thereafter, or (3) before personal information is used for new purposes not previously identified.Covers Entities and Activities in Notice - An objective description of the entities and activities covered is included in the entity’s privacy notice.Uses Clear and Conspicuous Language - The entity’s privacy notice is conspicuous and uses clear language.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_1_1",
|
||||
"Section": "P1.0 - Privacy Criteria Related to Notice and Communication of Objectives Related to Privacy",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_2_1",
|
||||
"Name": "P2.1 The entity communicates choices available regarding the collection, use, retention, disclosure, and disposal of personal information to the data subjects and the consequences, if any, of each choice",
|
||||
"Description": "The entity communicates choices available regarding the collection, use, retention, disclosure, and disposal of personal information to the data subjects and the consequences, if any, of each choice. Explicit consent for the collection, use, retention, disclosure, and disposal of personal information is obtained from data subjects or other authorized persons, if required. Such consent is obtained only for the intended purpose of the information to meet the entity’s objectives related to privacy. The entity’s basis for determining implicit consent for the collection, use, retention, disclosure, and disposal of personal information is documented.Communicates to Data Subjects - Data subjects are informed (a) about the choices available to them with respect to the collection, use, and disclosure of personal information and (b) that implicit or explicit consent is required to collect, use, and disclose personal information, unless a law or regulation specifically requires or allows otherwise.Communicates Consequences of Denying or Withdrawing Consent - When personal information is collected, data subjects are informed of the consequences of refusing to provide personal information or denying or withdrawing consent to use personal information for purposes identified in the notice.Obtains Implicit or Explicit Consent - Implicit or explicit consent is obtained from data subjects at or before the time personal information is collected or soon thereafter. 
The individual’s preferences expressed in his or her consent are confirmed and implemented.Documents and Obtains Consent for New Purposes and Uses - If information that was previously collected is to be used for purposes not previously identified in the privacy notice, the new purpose is documented, the data subject is notified, and implicit or explicit consent is obtained prior to such new use or purpose.Obtains Explicit Consent for Sensitive Information - Explicit consent is obtained directly from the data subject when sensitive personal information is collected, used, or disclosed, unless a law or regulation specifically requires otherwise.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_2_1",
|
||||
"Section": "P2.0 - Privacy Criteria Related to Choice and Consent",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_3_1",
|
||||
"Name": "P3.1 Personal information is collected consistent with the entity’s objectives related to privacy",
|
||||
"Description": "Limits the Collection of Personal Information - The collection of personal information is limited to that necessary to meet the entity’s objectives.Collects Information by Fair and Lawful Means - Methods of collecting personal information are reviewed by management before they are implemented to confirm that personal information is obtained (a) fairly, without intimidation or deception, and (b) lawfully, adhering to all relevant rules of law, whether derived from statute or common law, relating to the collection of personal information.Collects Information From Reliable Sources - Management confirms that third parties from whom personal information is collected (that is, sources other than the individual) are reliable sources that collect information fairly and lawfully.Informs Data Subjects When Additional Information Is Acquired - Data subjects are informed if the entity develops or acquires additional information about them for its use.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_3_1",
|
||||
"Section": "P3.0 - Privacy Criteria Related to Collection",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_3_2",
|
||||
"Name": "P3.2 For information requiring explicit consent, the entity communicates the need for such consent, as well as the consequences of a failure to provide consent for the request for personal information, and obtains the consent prior to the collection of the information to meet the entity’s objectives related to privacy",
|
||||
"Description": "Obtains Explicit Consent for Sensitive Information - Explicit consent is obtained directly from the data subject when sensitive personal information is collected, used, or disclosed, unless a law or regulation specifically requires otherwise.Documents Explicit Consent to Retain Information - Documentation of explicit consent for the collection, use, or disclosure of sensitive personal information is retained in accordance with objectives related to privacy.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_3_2",
|
||||
"Section": "P3.0 - Privacy Criteria Related to Collection",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_4_1",
|
||||
"Name": "P4.1 The entity limits the use of personal information to the purposes identified in the entity’s objectives related to privacy",
|
||||
"Description": "Uses Personal Information for Intended Purposes - Personal information is used only for the intended purposes for which it was collected and only when implicit or explicit consent has been obtained unless a law or regulation specifically requires otherwise.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_4_1",
|
||||
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_4_2",
|
||||
"Name": "P4.2 The entity retains personal information consistent with the entity’s objectives related to privacy",
|
||||
"Description": "Retains Personal Information - Personal information is retained for no longer than necessary to fulfill the stated purposes, unless a law or regulation specifically requires otherwise.Protects Personal Information - Policies and procedures have been implemented to protect personal information from erasure or destruction during the specified retention period of the information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_4_2",
|
||||
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_4_3",
|
||||
"Name": "P4.3 The entity securely disposes of personal information to meet the entity’s objectives related to privacy",
|
||||
"Description": "Captures, Identifies, and Flags Requests for Deletion - Requests for deletion of personal information are captured, and information related to the requests is identified and flagged for destruction to meet the entity’s objectives related to privacy.Disposes of, Destroys, and Redacts Personal Information - Personal information no longer retained is anonymized, disposed of, or destroyed in a manner that prevents loss, theft, misuse, or unauthorized access.Destroys Personal Information - Policies and procedures are implemented to erase or otherwise destroy personal information that has been identified for destruction.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_4_3",
|
||||
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_5_1",
|
||||
"Name": "P5.1 The entity grants identified and authenticated data subjects the ability to access their stored personal information for review and, upon request, provides physical or electronic copies of that information to data subjects to meet the entity’s objectives related to privacy",
|
||||
"Description": "The entity grants identified and authenticated data subjects the ability to access their stored personal information for review and, upon request, provides physical or electronic copies of that information to data subjects to meet the entity’s objectives related to privacy. If access is denied, data subjects are informed of the denial and reason for such denial, as required, to meet the entity’s objectives related to privacy.Authenticates Data Subjects’ Identity - The identity of data subjects who request access to their personal information is authenticated before they are given access to that information.Permits Data Subjects Access to Their Personal Information - Data subjects are able to determine whether the entity maintains personal information about them and, upon request, may obtain access to their personal information.Provides Understandable Personal Information Within Reasonable Time - Personal information is provided to data subjects in an understandable form, in a reasonable time frame, and at a reasonable cost, if any.Informs Data Subjects If Access Is Denied - When data subjects are denied access to their personal information, the entity informs them of the denial and the reason for the denial in a timely manner, unless prohibited by law or regulation.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_5_1",
|
||||
"Section": "P5.0 - Privacy Criteria Related to Access",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_5_2",
|
||||
"Name": "P5.2 The entity corrects, amends, or appends personal information based on information provided by data subjects and communicates such information to third parties, as committed or required, to meet the entity’s objectives related to privacy",
|
||||
"Description": "The entity corrects, amends, or appends personal information based on information provided by data subjects and communicates such information to third parties, as committed or required, to meet the entity’s objectives related to privacy. If a request for correction is denied, data subjects are informed of the denial and reason for such denial to meet the entity’s objectives related to privacy.Communicates Denial of Access Requests - Data subjects are informed, in writing, of the reason a request for access to their personal information was denied, the source of the entity’s legal right to deny such access, if applicable, and the individual’s right, if any, to challenge such denial, as specifically permitted or required by law or regulation.Permits Data Subjects to Update or Correct Personal Information - Data subjects are able to update or correct personal information held by the entity. The entity provides such updated or corrected information to third parties that were previously provided with the data subject’s personal information consistent with the entity’s objective related to privacy.Communicates Denial of Correction Requests - Data subjects are informed, in writing, about the reason a request for correction of personal information was denied and how they may appeal.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_5_2",
|
||||
"Section": "P5.0 - Privacy Criteria Related to Access",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_6_1",
|
||||
"Name": "P6.1 The entity discloses personal information to third parties with the explicit consent of data subjects, and such consent is obtained prior to disclosure to meet the entity’s objectives related to privacy",
|
||||
"Description": "Communicates Privacy Policies to Third Parties - Privacy policies or other specific instructions or requirements for handling personal information are communicated to third parties to whom personal information is disclosed.Discloses Personal Information Only When Appropriate - Personal information is disclosed to third parties only for the purposes for which it was collected or created and only when implicit or explicit consent has been obtained from the data subject, unless a law or regulation specifically requires otherwise.Discloses Personal Information Only to Appropriate Third Parties - Personal information is disclosed only to third parties who have agreements with the entity to protect personal information in a manner consistent with the relevant aspects of the entity’s privacy notice or other specific instructions or requirements. The entity has procedures in place to evaluate that the third parties have effective controls to meet the terms of the agreement, instructions, or requirements.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_6_1",
|
||||
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_6_2",
|
||||
"Name": "P6.2 The entity creates and retains a complete, accurate, and timely record of authorized disclosures of personal information to meet the entity’s objectives related to privacy",
|
||||
"Description": "Creates and Retains Record of Authorized Disclosures - The entity creates and maintains a record of authorized disclosures of personal information that is complete, accurate, and timely.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_6_2",
|
||||
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_6_3",
|
||||
"Name": "P6.3 The entity creates and retains a complete, accurate, and timely record of detected or reported unauthorized disclosures (including breaches) of personal information to meet the entity’s objectives related to privacy",
|
||||
"Description": "Creates and Retains Record of Detected or Reported Unauthorized Disclosures - The entity creates and maintains a record of detected or reported unauthorized disclosures of personal information that is complete, accurate, and timely.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_6_3",
|
||||
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_6_4",
|
||||
"Name": "P6.4 The entity obtains privacy commitments from vendors and other third parties who have access to personal information to meet the entity’s objectives related to privacy",
|
||||
"Description": "The entity obtains privacy commitments from vendors and other third parties who have access to personal information to meet the entity’s objectives related to privacy. The entity assesses those parties’ compliance on a periodic and as-needed basis and takes corrective action, if necessary.Discloses Personal Information Only to Appropriate Third Parties - Personal information is disclosed only to third parties who have agreements with the entity to protect personal information in a manner consistent with the relevant aspects of the entity’s privacy notice or other specific instructions or requirements. The entity has procedures in place to evaluate that the third parties have effective controls to meet the terms of the agreement, instructions, or requirements.Remediates Misuse of Personal Information by a Third Party - The entity takes remedial action in response to misuse of personal information by a third party to whom the entity has transferred such information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_6_4",
|
||||
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_6_5",
|
||||
"Name": "P6.5 The entity obtains commitments from vendors and other third parties with access to personal information to notify the entity in the event of actual or suspected unauthorized disclosures of personal information",
|
||||
"Description": "The entity obtains commitments from vendors and other third parties with access to personal information to notify the entity in the event of actual or suspected unauthorized disclosures of personal information. Such notifications are reported to appropriate personnel and acted on in accordance with established incident response procedures to meet the entity’s objectives related to privacy.Remediates Misuse of Personal Information by a Third Party - The entity takes remedial action in response to misuse of personal information by a third party to whom the entity has transferred such information.Reports Actual or Suspected Unauthorized Disclosures - A process exists for obtaining commitments from vendors and other third parties to report to the entity actual or suspected unauthorized disclosures of personal information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_6_5",
|
||||
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_6_6",
|
||||
"Name": "P6.6 The entity provides notification of breaches and incidents to affected data subjects, regulators, and others to meet the entity’s objectives related to privacy",
|
||||
"Description": "Remediates Misuse of Personal Information by a Third Party - The entity takes remedial action in response to misuse of personal information by a third party to whom the entity has transferred such information. Reports Actual or Suspected Unauthorized Disclosures - A process exists for obtaining commitments from vendors and other third parties to report to the entity actual or suspected unauthorized disclosures of personal information.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_6_6",
|
||||
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_6_7",
|
||||
"Name": "P6.7 The entity provides data subjects with an accounting of the personal information held and disclosure of the data subjects’ personal information, upon the data subjects’ request, to meet the entity’s objectives related to privacy",
|
||||
"Description": "Identifies Types of Personal Information and Handling Process - The types of personal information and sensitive personal information and the related processes, systems, and third parties involved in the handling of such information are identified. Captures, Identifies, and Communicates Requests for Information - Requests for an accounting of personal information held and disclosures of the data subjects’ personal information are captured, and information related to the requests is identified and communicated to data subjects to meet the entity’s objectives related to privacy.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_6_7",
|
||||
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_7_1",
|
||||
"Name": "P7.1 The entity collects and maintains accurate, up-to-date, complete, and relevant personal information to meet the entity’s objectives related to privacy",
|
||||
"Description": "Ensures Accuracy and Completeness of Personal Information - Personal information is accurate and complete for the purposes for which it is to be used. Ensures Relevance of Personal Information - Personal information is relevant to the purposes for which it is to be used.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_7_1",
|
||||
"Section": "P7.0 - Privacy Criteria Related to Quality",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "p_8_1",
|
||||
"Name": "P8.1 The entity implements a process for receiving, addressing, resolving, and communicating the resolution of inquiries, complaints, and disputes from data subjects and others and periodically monitors compliance to meet the entity’s objectives related to privacy",
|
||||
"Description": "The entity implements a process for receiving, addressing, resolving, and communicating the resolution of inquiries, complaints, and disputes from data subjects and others and periodically monitors compliance to meet the entity’s objectives related to privacy. Corrections and other necessary actions related to identified deficiencies are made or taken in a timely manner.Communicates to Data Subjects—Data subjects are informed about how to contact the entity with inquiries, complaints, and disputes.Addresses Inquiries, Complaints, and Disputes - A process is in place to address inquiries, complaints, and disputes.Documents and Communicates Dispute Resolution and Recourse - Each complaint is addressed, and the resolution is documented and communicated to the individual.Documents and Reports Compliance Review Results - Compliance with objectives related to privacy are reviewed and documented, and the results of such reviews are reported to management. If problems are identified, remediation plans are developed and implemented.Documents and Reports Instances of Noncompliance - Instances of noncompliance with objectives related to privacy are documented and reported and, if needed, corrective and disciplinary measures are taken on a timely basis.Performs Ongoing Monitoring - Ongoing procedures are performed for monitoring the effectiveness of controls over personal information and for taking timely corrective actions when necessary.",
|
||||
"Attributes": [
|
||||
{
|
||||
"ItemId": "p_8_1",
|
||||
"Section": "P8.0 - Privacy Criteria Related to Monitoring and Enforcement",
|
||||
"Service": "aws",
|
||||
"Soc_Type": "manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
### Account, Check and/or Region can be * to apply for all the cases
|
||||
### Resources is a list that can have either Regex or Keywords:
|
||||
### Resources is a list that can have either Regex or Keywords
|
||||
### Tags is an optional list containing tuples of 'key=value'
|
||||
########################### ALLOWLIST EXAMPLE ###########################
|
||||
Allowlist:
|
||||
Accounts:
|
||||
@@ -15,7 +16,10 @@ Allowlist:
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "test" # Will ignore every resource containing the string "test" in every account and region
|
||||
- "test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
|
||||
Tags:
|
||||
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and 'project=test' in account 123456789012 and every region
|
||||
- "project=test"
|
||||
|
||||
"*":
|
||||
Checks:
|
||||
@@ -27,6 +31,14 @@ Allowlist:
|
||||
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
|
||||
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
|
||||
- "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
|
||||
"*":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "*"
|
||||
Tags:
|
||||
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
|
||||
|
||||
|
||||
# EXAMPLE: CONTROL TOWER (to migrate)
|
||||
# When using Control Tower, guardrails prevent access to certain protected resources. The allowlist
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
from datetime import datetime, timezone
|
||||
from os import getcwd
|
||||
|
||||
import requests
|
||||
import yaml
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.utils.utils import open_file, parse_json_file
|
||||
|
||||
timestamp = datetime.today()
|
||||
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
|
||||
prowler_version = "3.0.2"
|
||||
prowler_version = "3.3.1"
|
||||
html_logo_url = "https://github.com/prowler-cloud/prowler/"
|
||||
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
|
||||
|
||||
@@ -17,11 +19,23 @@ orange_color = "\033[38;5;208m"
|
||||
banner_color = "\033[1;92m"
|
||||
|
||||
# Compliance
|
||||
compliance_specification_dir = "./compliance"
|
||||
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
|
||||
compliance_aws_dir = f"{actual_directory}/../compliance/aws"
|
||||
available_compliance_frameworks = []
|
||||
with os.scandir(compliance_aws_dir) as files:
|
||||
files = [
|
||||
file.name
|
||||
for file in files
|
||||
if file.is_file()
|
||||
and file.name.endswith(".json")
|
||||
and available_compliance_frameworks.append(file.name.removesuffix(".json"))
|
||||
]
|
||||
|
||||
# AWS services-regions matrix json
|
||||
aws_services_json_file = "aws_regions_by_service.json"
|
||||
|
||||
# gcp_zones_json_file = "gcp_zones.json"
|
||||
|
||||
default_output_directory = getcwd() + "/output"
|
||||
|
||||
output_file_timestamp = timestamp.strftime("%Y%m%d%H%M%S")
|
||||
@@ -30,7 +44,21 @@ csv_file_suffix = ".csv"
|
||||
json_file_suffix = ".json"
|
||||
json_asff_file_suffix = ".asff.json"
|
||||
html_file_suffix = ".html"
|
||||
config_yaml = f"{os.path.dirname(os.path.realpath(__file__))}/config.yaml"
|
||||
config_yaml = f"{pathlib.Path(os.path.dirname(os.path.realpath(__file__)))}/config.yaml"
|
||||
|
||||
|
||||
def check_current_version(prowler_version):
|
||||
try:
|
||||
release_response = requests.get(
|
||||
"https://api.github.com/repos/prowler-cloud/prowler/tags"
|
||||
)
|
||||
latest_version = json.loads(release_response)[0]["name"]
|
||||
if latest_version != prowler_version:
|
||||
return f"(latest is {latest_version}, upgrade for the latest features)"
|
||||
else:
|
||||
return "(it is the latest version, yay!)"
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def change_config_var(variable, value):
|
||||
@@ -55,22 +83,3 @@ def get_config_var(variable):
|
||||
except Exception as error:
|
||||
logger.error(f"{error.__class__.__name__}: {error}")
|
||||
return ""
|
||||
|
||||
|
||||
def get_aws_available_regions():
|
||||
try:
|
||||
actual_directory = ("/").join(
|
||||
os.path.dirname(os.path.realpath(__file__)).split("/")[:-1]
|
||||
)
|
||||
f = open_file(f"{actual_directory}/providers/aws/{aws_services_json_file}")
|
||||
data = parse_json_file(f)
|
||||
|
||||
regions = set()
|
||||
for service in data["services"].values():
|
||||
for partition in service["regions"]:
|
||||
for item in service["regions"][partition]:
|
||||
regions.add(item)
|
||||
return list(regions)
|
||||
except Exception as error:
|
||||
logger.error(f"{error.__class__.__name__}: {error}")
|
||||
return []
|
||||
|
||||
@@ -2,8 +2,10 @@ import functools
|
||||
import importlib
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from pkgutil import walk_packages
|
||||
from types import ModuleType
|
||||
from typing import Any
|
||||
|
||||
from alive_progress import alive_bar
|
||||
from colorama import Fore, Style
|
||||
@@ -12,9 +14,18 @@ from prowler.config.config import orange_color
|
||||
from prowler.lib.check.compliance_models import load_compliance_framework
|
||||
from prowler.lib.check.models import Check, load_check_metadata
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.outputs import report
|
||||
|
||||
try:
|
||||
lib = os.environ["PROWLER_REPORT_LIB_PATH"]
|
||||
outputs_module = importlib.import_module(lib)
|
||||
report = getattr(outputs_module, "report")
|
||||
except KeyError:
|
||||
from prowler.lib.outputs.outputs import report
|
||||
except Exception:
|
||||
sys.exit(1)
|
||||
|
||||
from prowler.lib.utils.utils import open_file, parse_json_file
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from prowler.providers.common.outputs import Provider_Output_Options
|
||||
|
||||
|
||||
@@ -97,8 +108,8 @@ def exclude_services_to_run(
|
||||
# Load checks from checklist.json
|
||||
def parse_checks_from_file(input_file: str, provider: str) -> set:
|
||||
checks_to_execute = set()
|
||||
f = open_file(input_file)
|
||||
json_file = parse_json_file(f)
|
||||
with open_file(input_file) as f:
|
||||
json_file = parse_json_file(f)
|
||||
|
||||
for check_name in json_file[provider]:
|
||||
checks_to_execute.add(check_name)
|
||||
@@ -111,7 +122,10 @@ def list_services(provider: str) -> set():
|
||||
checks_tuple = recover_checks_from_provider(provider)
|
||||
for _, check_path in checks_tuple:
|
||||
# Format: /absolute_path/prowler/providers/{provider}/services/{service_name}/{check_name}
|
||||
service_name = check_path.split("/")[-2]
|
||||
if os.name == "nt":
|
||||
service_name = check_path.split("\\")[-2]
|
||||
else:
|
||||
service_name = check_path.split("/")[-2]
|
||||
available_services.add(service_name)
|
||||
return sorted(available_services)
|
||||
|
||||
@@ -125,17 +139,28 @@ def list_categories(provider: str, bulk_checks_metadata: dict) -> set():
|
||||
|
||||
|
||||
def print_categories(categories: set):
|
||||
print(
|
||||
f"There are {Fore.YELLOW}{len(categories)}{Style.RESET_ALL} available categories: \n"
|
||||
)
|
||||
categories_num = len(categories)
|
||||
plural_string = f"There are {Fore.YELLOW}{categories_num}{Style.RESET_ALL} available categories: \n"
|
||||
singular_string = f"There is {Fore.YELLOW}{categories_num}{Style.RESET_ALL} available category: \n"
|
||||
|
||||
message = plural_string if categories_num > 1 else singular_string
|
||||
print(message)
|
||||
for category in categories:
|
||||
print(f"- {category}")
|
||||
|
||||
|
||||
def print_services(service_list: set):
|
||||
print(
|
||||
f"There are {Fore.YELLOW}{len(service_list)}{Style.RESET_ALL} available services: \n"
|
||||
services_num = len(service_list)
|
||||
plural_string = (
|
||||
f"There are {Fore.YELLOW}{services_num}{Style.RESET_ALL} available services: \n"
|
||||
)
|
||||
singular_string = (
|
||||
f"There is {Fore.YELLOW}{services_num}{Style.RESET_ALL} available service: \n"
|
||||
)
|
||||
|
||||
message = plural_string if services_num > 1 else singular_string
|
||||
print(message)
|
||||
|
||||
for service in service_list:
|
||||
print(f"- {service}")
|
||||
|
||||
@@ -143,9 +168,12 @@ def print_services(service_list: set):
|
||||
def print_compliance_frameworks(
|
||||
bulk_compliance_frameworks: dict,
|
||||
):
|
||||
print(
|
||||
f"There are {Fore.YELLOW}{len(bulk_compliance_frameworks.keys())}{Style.RESET_ALL} available Compliance Frameworks: \n"
|
||||
)
|
||||
frameworks_num = len(bulk_compliance_frameworks.keys())
|
||||
plural_string = f"There are {Fore.YELLOW}{frameworks_num}{Style.RESET_ALL} available Compliance Frameworks: \n"
|
||||
singular_string = f"There is {Fore.YELLOW}{frameworks_num}{Style.RESET_ALL} available Compliance Framework: \n"
|
||||
message = plural_string if frameworks_num > 1 else singular_string
|
||||
|
||||
print(message)
|
||||
for framework in bulk_compliance_frameworks.keys():
|
||||
print(f"\t- {Fore.YELLOW}{framework}{Style.RESET_ALL}")
|
||||
|
||||
@@ -154,17 +182,18 @@ def print_compliance_requirements(
|
||||
bulk_compliance_frameworks: dict, compliance_frameworks: list
|
||||
):
|
||||
for compliance_framework in compliance_frameworks:
|
||||
for compliance in bulk_compliance_frameworks.values():
|
||||
# Workaround until we have more Compliance Frameworks
|
||||
split_compliance = compliance_framework.split("_")
|
||||
framework = split_compliance[0].upper()
|
||||
version = split_compliance[1].upper()
|
||||
provider = split_compliance[2].upper()
|
||||
if framework in compliance.Framework and compliance.Version == version:
|
||||
for key in bulk_compliance_frameworks.keys():
|
||||
framework = bulk_compliance_frameworks[key].Framework
|
||||
provider = bulk_compliance_frameworks[key].Provider
|
||||
version = bulk_compliance_frameworks[key].Version
|
||||
requirements = bulk_compliance_frameworks[key].Requirements
|
||||
# We can list the compliance requirements for a given framework using the
|
||||
# bulk_compliance_frameworks keys since they are the compliance specification file name
|
||||
if compliance_framework == key:
|
||||
print(
|
||||
f"Listing {framework} {version} {provider} Compliance Requirements:\n"
|
||||
)
|
||||
for requirement in compliance.Requirements:
|
||||
for requirement in requirements:
|
||||
checks = ""
|
||||
for check in requirement.Checks:
|
||||
checks += f" {Fore.YELLOW}\t\t{check}\n{Style.RESET_ALL}"
|
||||
@@ -187,11 +216,18 @@ def print_checks(
|
||||
logger.critical(
|
||||
f"Check {error} was not found for the {provider.upper()} provider"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
print(
|
||||
f"\nThere are {Fore.YELLOW}{len(check_list)}{Style.RESET_ALL} available checks.\n"
|
||||
checks_num = len(check_list)
|
||||
plural_string = (
|
||||
f"\nThere are {Fore.YELLOW}{checks_num}{Style.RESET_ALL} available checks.\n"
|
||||
)
|
||||
singular_string = (
|
||||
f"\nThere is {Fore.YELLOW}{checks_num}{Style.RESET_ALL} available check.\n"
|
||||
)
|
||||
|
||||
message = plural_string if checks_num > 1 else singular_string
|
||||
print(message)
|
||||
|
||||
|
||||
# Parse checks from compliance frameworks specification
|
||||
@@ -233,11 +269,7 @@ def recover_checks_from_provider(provider: str, service: str = None) -> list[tup
|
||||
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_module_name = module_name.name
|
||||
# We need to exclude common shared libraries in services
|
||||
if (
|
||||
check_module_name.count(".") == 6
|
||||
and "lib" not in check_module_name
|
||||
and "test" not in check_module_name
|
||||
):
|
||||
if check_module_name.count(".") == 6 and "lib" not in check_module_name:
|
||||
check_path = module_name.module_finder.path
|
||||
# Check name is the last part of the check_module_name
|
||||
check_name = check_module_name.split(".")[-1]
|
||||
@@ -245,10 +277,10 @@ def recover_checks_from_provider(provider: str, service: str = None) -> list[tup
|
||||
checks.append(check_info)
|
||||
except ModuleNotFoundError:
|
||||
logger.critical(f"Service {service} was not found for the {provider} provider.")
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
else:
|
||||
return checks
|
||||
|
||||
@@ -285,7 +317,7 @@ def import_check(check_path: str) -> ModuleType:
|
||||
|
||||
def run_check(check: Check, output_options: Provider_Output_Options) -> list:
|
||||
findings = []
|
||||
if output_options.verbose or output_options.is_quiet:
|
||||
if output_options.verbose:
|
||||
print(
|
||||
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
|
||||
)
|
||||
@@ -293,9 +325,12 @@ def run_check(check: Check, output_options: Provider_Output_Options) -> list:
|
||||
try:
|
||||
findings = check.execute()
|
||||
except Exception as error:
|
||||
print(f"Something went wrong in {check.CheckID}, please use --log-level ERROR")
|
||||
if not output_options.only_logs:
|
||||
print(
|
||||
f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
|
||||
)
|
||||
logger.error(
|
||||
f"{check.CheckID} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
|
||||
)
|
||||
finally:
|
||||
return findings
|
||||
@@ -304,48 +339,187 @@ def run_check(check: Check, output_options: Provider_Output_Options) -> list:
|
||||
def execute_checks(
|
||||
checks_to_execute: list,
|
||||
provider: str,
|
||||
audit_info: AWS_Audit_Info,
|
||||
audit_info: Any,
|
||||
audit_output_options: Provider_Output_Options,
|
||||
) -> list:
|
||||
# List to store all the check's findings
|
||||
all_findings = []
|
||||
print(
|
||||
f"{Style.BRIGHT}Executing {len(checks_to_execute)} checks, please wait...{Style.RESET_ALL}\n"
|
||||
# Services and checks executed for the Audit Status
|
||||
services_executed = set()
|
||||
checks_executed = set()
|
||||
|
||||
# Initialize the Audit Metadata
|
||||
audit_info.audit_metadata = Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=checks_to_execute,
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
)
|
||||
with alive_bar(
|
||||
total=len(checks_to_execute),
|
||||
ctrl_c=False,
|
||||
bar="blocks",
|
||||
spinner="classic",
|
||||
stats=False,
|
||||
enrich_print=False,
|
||||
) as bar:
|
||||
|
||||
if os.name != "nt":
|
||||
try:
|
||||
from resource import RLIMIT_NOFILE, getrlimit
|
||||
|
||||
# Check ulimit for the maximum system open files
|
||||
soft, _ = getrlimit(RLIMIT_NOFILE)
|
||||
if soft < 4096:
|
||||
logger.warning(
|
||||
f"Your session file descriptors limit ({soft} open files) is below 4096. We recommend to increase it to avoid errors. Solve it running this command `ulimit -n 4096`. For more info visit https://docs.prowler.cloud/en/latest/troubleshooting/"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error("Unable to retrieve ulimit default settings")
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
# Execution with the --only-logs flag
|
||||
if audit_output_options.only_logs:
|
||||
for check_name in checks_to_execute:
|
||||
# Recover service from check name
|
||||
service = check_name.split("_")[0]
|
||||
bar.title = f"-> Scanning {orange_color}{service}{Style.RESET_ALL} service"
|
||||
try:
|
||||
# Import check module
|
||||
check_module_path = f"prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
lib = import_check(check_module_path)
|
||||
# Recover functions from check
|
||||
check_to_execute = getattr(lib, check_name)
|
||||
c = check_to_execute()
|
||||
# Run check
|
||||
check_findings = run_check(c, audit_output_options)
|
||||
check_findings = execute(
|
||||
service,
|
||||
check_name,
|
||||
provider,
|
||||
audit_output_options,
|
||||
audit_info,
|
||||
services_executed,
|
||||
checks_executed,
|
||||
)
|
||||
all_findings.extend(check_findings)
|
||||
report(check_findings, audit_output_options, audit_info)
|
||||
bar()
|
||||
|
||||
# If check does not exists in the provider or is from another provider
|
||||
except ModuleNotFoundError:
|
||||
logger.critical(
|
||||
f"Check '{check_name}' was not found for the {provider.upper()} provider"
|
||||
)
|
||||
bar.title = f"-> {Fore.RED}Scan was aborted!{Style.RESET_ALL}"
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
bar.title = f"-> {Fore.GREEN}Scan completed!{Style.RESET_ALL}"
|
||||
else:
|
||||
# Default execution
|
||||
checks_num = len(checks_to_execute)
|
||||
plural_string = "checks"
|
||||
singular_string = "check"
|
||||
|
||||
check_noun = plural_string if checks_num > 1 else singular_string
|
||||
print(
|
||||
f"{Style.BRIGHT}Executing {checks_num} {check_noun}, please wait...{Style.RESET_ALL}\n"
|
||||
)
|
||||
with alive_bar(
|
||||
total=len(checks_to_execute),
|
||||
ctrl_c=False,
|
||||
bar="blocks",
|
||||
spinner="classic",
|
||||
stats=False,
|
||||
enrich_print=False,
|
||||
) as bar:
|
||||
for check_name in checks_to_execute:
|
||||
# Recover service from check name
|
||||
service = check_name.split("_")[0]
|
||||
bar.title = (
|
||||
f"-> Scanning {orange_color}{service}{Style.RESET_ALL} service"
|
||||
)
|
||||
try:
|
||||
check_findings = execute(
|
||||
service,
|
||||
check_name,
|
||||
provider,
|
||||
audit_output_options,
|
||||
audit_info,
|
||||
services_executed,
|
||||
checks_executed,
|
||||
)
|
||||
all_findings.extend(check_findings)
|
||||
bar()
|
||||
|
||||
# If check does not exists in the provider or is from another provider
|
||||
except ModuleNotFoundError:
|
||||
logger.critical(
|
||||
f"Check '{check_name}' was not found for the {provider.upper()} provider"
|
||||
)
|
||||
bar.title = f"-> {Fore.RED}Scan was aborted!{Style.RESET_ALL}"
|
||||
sys.exit(1)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
bar.title = f"-> {Fore.GREEN}Scan completed!{Style.RESET_ALL}"
|
||||
return all_findings
|
||||
|
||||
|
||||
def execute(
|
||||
service: str,
|
||||
check_name: str,
|
||||
provider: str,
|
||||
audit_output_options: Provider_Output_Options,
|
||||
audit_info: Any,
|
||||
services_executed: set,
|
||||
checks_executed: set,
|
||||
):
|
||||
# Import check module
|
||||
check_module_path = (
|
||||
f"prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
)
|
||||
lib = import_check(check_module_path)
|
||||
# Recover functions from check
|
||||
check_to_execute = getattr(lib, check_name)
|
||||
c = check_to_execute()
|
||||
|
||||
# Run check
|
||||
check_findings = run_check(c, audit_output_options)
|
||||
|
||||
# Update Audit Status
|
||||
services_executed.add(service)
|
||||
checks_executed.add(check_name)
|
||||
audit_info.audit_metadata = update_audit_metadata(
|
||||
audit_info.audit_metadata, services_executed, checks_executed
|
||||
)
|
||||
|
||||
# Report the check's findings
|
||||
report(check_findings, audit_output_options, audit_info)
|
||||
|
||||
return check_findings
|
||||
|
||||
|
||||
def update_audit_metadata(
|
||||
audit_metadata: Audit_Metadata, services_executed: set, checks_executed: set
|
||||
) -> Audit_Metadata:
|
||||
"""update_audit_metadata returns the audit_metadata updated with the new status
|
||||
|
||||
Updates the given audit_metadata using the length of the services_executed and checks_executed
|
||||
"""
|
||||
try:
|
||||
audit_metadata.services_scanned = len(services_executed)
|
||||
audit_metadata.completed_checks = len(checks_executed)
|
||||
audit_metadata.audit_progress = (
|
||||
100 * len(checks_executed) / len(audit_metadata.expected_checks)
|
||||
)
|
||||
|
||||
return audit_metadata
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
def recover_checks_from_service(service_list: list, provider: str) -> list:
|
||||
checks = set()
|
||||
for service in service_list:
|
||||
modules = recover_checks_from_provider(provider, service)
|
||||
if not modules:
|
||||
logger.error(f"Service '{service}' does not have checks.")
|
||||
|
||||
else:
|
||||
for check_module in modules:
|
||||
# Recover check name and module name from import path
|
||||
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_name = check_module[0].split(".")[-1]
|
||||
# If the service is present in the group list passed as parameters
|
||||
# if service_name in group_list: checks_from_arn.add(check_name)
|
||||
checks.add(check_name)
|
||||
return checks
|
||||
|
||||
@@ -2,6 +2,7 @@ from prowler.lib.check.check import (
|
||||
parse_checks_from_compliance_framework,
|
||||
parse_checks_from_file,
|
||||
recover_checks_from_provider,
|
||||
recover_checks_from_service,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
@@ -43,19 +44,7 @@ def load_checks_to_execute(
|
||||
|
||||
# Handle if there are services passed using -s/--services
|
||||
elif service_list:
|
||||
# Loaded dynamically from modules within provider/services
|
||||
for service in service_list:
|
||||
modules = recover_checks_from_provider(provider, service)
|
||||
if not modules:
|
||||
logger.error(f"Service '{service}' does not have checks.")
|
||||
else:
|
||||
for check_module in modules:
|
||||
# Recover check name and module name from import path
|
||||
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_name = check_module[0].split(".")[-1]
|
||||
# If the service is present in the group list passed as parameters
|
||||
# if service_name in group_list: checks_to_execute.add(check_name)
|
||||
checks_to_execute.add(check_name)
|
||||
checks_to_execute = recover_checks_from_service(service_list, provider)
|
||||
|
||||
# Handle if there are compliance frameworks passed using --compliance
|
||||
elif compliance_frameworks:
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import sys
|
||||
|
||||
from pydantic import parse_obj_as
|
||||
|
||||
from prowler.lib.check.compliance_models import (
|
||||
Compliance_Base_Model,
|
||||
Compliance_Requirement,
|
||||
)
|
||||
from prowler.lib.check.models import Check_Metadata_Model
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
|
||||
@@ -17,6 +20,7 @@ def update_checks_metadata_with_compliance(
|
||||
for framework in bulk_compliance_frameworks.values():
|
||||
for requirement in framework.Requirements:
|
||||
compliance_requirements = []
|
||||
# Verify if check is in the requirement
|
||||
if check in requirement.Checks:
|
||||
# Create the Compliance_Requirement
|
||||
requirement = Compliance_Requirement(
|
||||
@@ -34,13 +38,61 @@ def update_checks_metadata_with_compliance(
|
||||
Framework=framework.Framework,
|
||||
Provider=framework.Provider,
|
||||
Version=framework.Version,
|
||||
Description=framework.Description,
|
||||
Requirements=compliance_requirements,
|
||||
)
|
||||
# Include the compliance framework for the check
|
||||
check_compliance.append(compliance)
|
||||
# Save it into the check's metadata
|
||||
bulk_checks_metadata[check].Compliance = check_compliance
|
||||
|
||||
# Add requirements of Manual Controls
|
||||
for framework in bulk_compliance_frameworks.values():
|
||||
for requirement in framework.Requirements:
|
||||
compliance_requirements = []
|
||||
# Verify if requirement is Manual
|
||||
if not requirement.Checks:
|
||||
compliance_requirements.append(requirement)
|
||||
# Create the Compliance_Model
|
||||
compliance = Compliance_Base_Model(
|
||||
Framework=framework.Framework,
|
||||
Provider=framework.Provider,
|
||||
Version=framework.Version,
|
||||
Description=framework.Description,
|
||||
Requirements=compliance_requirements,
|
||||
)
|
||||
# Include the compliance framework for the check
|
||||
check_compliance.append(compliance)
|
||||
# Create metadata for Manual Control
|
||||
manual_check_metadata = {
|
||||
"Provider": "aws",
|
||||
"CheckID": "manual_check",
|
||||
"CheckTitle": "Manual Check",
|
||||
"CheckType": [],
|
||||
"ServiceName": "",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "",
|
||||
"ResourceType": "",
|
||||
"Description": "",
|
||||
"Risk": "",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {"CLI": "", "NativeIaC": "", "Other": "", "Terraform": ""},
|
||||
"Recommendation": {"Text": "", "Url": ""},
|
||||
},
|
||||
"Categories": [],
|
||||
"Tags": {},
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "",
|
||||
}
|
||||
manual_check = parse_obj_as(Check_Metadata_Model, manual_check_metadata)
|
||||
# Save it into the check's metadata
|
||||
bulk_checks_metadata["manual_check"] = manual_check
|
||||
bulk_checks_metadata["manual_check"].Compliance = check_compliance
|
||||
|
||||
return bulk_checks_metadata
|
||||
except Exception as e:
|
||||
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import sys
|
||||
from enum import Enum
|
||||
from typing import Any, List, Optional, Union
|
||||
from typing import Optional, Union
|
||||
|
||||
from pydantic import BaseModel, ValidationError
|
||||
from pydantic import BaseModel, ValidationError, root_validator
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
@@ -11,10 +11,10 @@ from prowler.lib.logger import logger
|
||||
class ENS_Requirements_Nivel(str, Enum):
|
||||
"""ENS V3 Requirements Level"""
|
||||
|
||||
opcional = "opcional"
|
||||
bajo = "bajo"
|
||||
medio = "medio"
|
||||
alto = "alto"
|
||||
pytec = "pytec"
|
||||
|
||||
|
||||
class ENS_Requirements_Dimensiones(str, Enum):
|
||||
@@ -27,35 +27,101 @@ class ENS_Requirements_Dimensiones(str, Enum):
|
||||
disponibilidad = "disponibilidad"
|
||||
|
||||
|
||||
class ENS_Requirements_Tipos(str, Enum):
|
||||
"""ENS Requirements Tipos"""
|
||||
|
||||
refuerzo = "refuerzo"
|
||||
requisito = "requisito"
|
||||
recomendacion = "recomendacion"
|
||||
medida = "medida"
|
||||
|
||||
|
||||
class ENS_Requirements(BaseModel):
|
||||
"""ENS V3 Framework Requirements"""
|
||||
|
||||
IdGrupoControl: str
|
||||
Marco: str
|
||||
Categoria: str
|
||||
Descripcion_Control: str
|
||||
Nivel: list[ENS_Requirements_Nivel]
|
||||
DescripcionControl: str
|
||||
Tipo: ENS_Requirements_Tipos
|
||||
Nivel: ENS_Requirements_Nivel
|
||||
Dimensiones: list[ENS_Requirements_Dimensiones]
|
||||
|
||||
|
||||
# Generic Compliance Requirements
|
||||
class Generic_Compliance_Requirements(BaseModel):
|
||||
"""Generic Compliance Requirements"""
|
||||
|
||||
ItemId: str
|
||||
Section: Optional[str]
|
||||
SubSection: Optional[str]
|
||||
SubGroup: Optional[str]
|
||||
Service: str
|
||||
Soc_Type: Optional[str]
|
||||
|
||||
|
||||
class CIS_Requirements_Profile(str):
|
||||
"""CIS Requirements Profile"""
|
||||
|
||||
Level_1 = "Level 1"
|
||||
Level_2 = "Level 2"
|
||||
|
||||
|
||||
class CIS_Requirements_AssessmentStatus(str):
|
||||
"""CIS Requirements Assessment Status"""
|
||||
|
||||
Manual = "Manual"
|
||||
Automated = "Automated"
|
||||
|
||||
|
||||
# CIS Requirements
|
||||
class CIS_Requirements(BaseModel):
|
||||
"""CIS Requirements"""
|
||||
|
||||
Section: str
|
||||
Profile: CIS_Requirements_Profile
|
||||
AssessmentStatus: CIS_Requirements_AssessmentStatus
|
||||
Description: str
|
||||
RationaleStatement: str
|
||||
ImpactStatement: str
|
||||
RemediationProcedure: str
|
||||
AuditProcedure: str
|
||||
AdditionalInformation: str
|
||||
References: str
|
||||
|
||||
|
||||
# Base Compliance Model
|
||||
class Compliance_Requirement(BaseModel):
|
||||
"""Compliance_Requirement holds the base model for every requirement within a compliance framework"""
|
||||
|
||||
Id: str
|
||||
Description: str
|
||||
Attributes: list[Union[ENS_Requirements, Any]]
|
||||
Checks: List[str]
|
||||
Attributes: list[
|
||||
Union[CIS_Requirements, ENS_Requirements, Generic_Compliance_Requirements]
|
||||
]
|
||||
Checks: list[str]
|
||||
|
||||
|
||||
class Compliance_Base_Model(BaseModel):
|
||||
"""Compliance_Base_Model holds the base model for every compliance framework"""
|
||||
|
||||
Framework: str
|
||||
Provider: Optional[str]
|
||||
Version: str
|
||||
Provider: str
|
||||
Version: Optional[str]
|
||||
Description: str
|
||||
Requirements: list[Compliance_Requirement]
|
||||
|
||||
@root_validator(pre=True)
|
||||
# noqa: F841 - since vulture raises unused variable 'cls'
|
||||
def framework_and_provider_must_not_be_empty(cls, values): # noqa: F841
|
||||
framework, provider = (
|
||||
values.get("Framework"),
|
||||
values.get("Provider"),
|
||||
)
|
||||
if framework == "" or provider == "":
|
||||
raise ValueError("Framework or Provider must not be empty")
|
||||
return values
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_compliance_framework(
|
||||
@@ -70,6 +136,6 @@ def load_compliance_framework(
|
||||
logger.critical(
|
||||
f"Compliance Framework Specification from {compliance_specification_file} is not valid: {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
else:
|
||||
return compliance_framework
|
||||
|
||||
@@ -48,7 +48,6 @@ class Check_Metadata_Model(BaseModel):
|
||||
RelatedUrl: str
|
||||
Remediation: Remediation
|
||||
Categories: list[str]
|
||||
Tags: dict
|
||||
DependsOn: list[str]
|
||||
RelatedTo: list[str]
|
||||
Notes: str
|
||||
@@ -63,8 +62,9 @@ class Check(ABC, Check_Metadata_Model):
|
||||
def __init__(self, **data):
|
||||
"""Check's init function. Calls the CheckMetadataModel init."""
|
||||
# Parse the Check's metadata file
|
||||
metadata_file = os.path.abspath(sys.modules[self.__module__].__file__).replace(
|
||||
".py", ".metadata.json"
|
||||
metadata_file = (
|
||||
os.path.abspath(sys.modules[self.__module__].__file__)[:-3]
|
||||
+ ".metadata.json"
|
||||
)
|
||||
# Store it to validate them with Pydantic
|
||||
data = Check_Metadata_Model.parse_file(metadata_file).dict()
|
||||
@@ -128,6 +128,23 @@ class Check_Report_Azure(Check_Report):
|
||||
self.subscription = ""
|
||||
|
||||
|
||||
@dataclass
|
||||
class Check_Report_GCP(Check_Report):
|
||||
"""Contains the GCP Check's finding information."""
|
||||
|
||||
resource_name: str
|
||||
resource_id: str
|
||||
project_id: str
|
||||
location: str
|
||||
|
||||
def __init__(self, metadata):
|
||||
super().__init__(metadata)
|
||||
self.resource_name = ""
|
||||
self.resource_id = ""
|
||||
self.project_id = ""
|
||||
self.location = ""
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
|
||||
"""load_check_metadata loads and parse a Check's metadata file"""
|
||||
@@ -135,6 +152,6 @@ def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
|
||||
check_metadata = Check_Metadata_Model.parse_file(metadata_file)
|
||||
except ValidationError as error:
|
||||
logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
else:
|
||||
return check_metadata
|
||||
|
||||
@@ -3,10 +3,20 @@ import sys
|
||||
from argparse import RawTextHelpFormatter
|
||||
|
||||
from prowler.config.config import (
|
||||
available_compliance_frameworks,
|
||||
check_current_version,
|
||||
default_output_directory,
|
||||
get_aws_available_regions,
|
||||
prowler_version,
|
||||
)
|
||||
from prowler.providers.aws.aws_provider import get_aws_available_regions
|
||||
from prowler.providers.aws.lib.arn.arn import is_valid_arn
|
||||
|
||||
|
||||
def arn_type(arn: str) -> bool:
|
||||
"""arn_type returns a string ARN if it is valid and raises an argparse.ArgumentError if not."""
|
||||
if not is_valid_arn(arn):
|
||||
raise argparse.ArgumentError("Invalid ARN")
|
||||
return arn
|
||||
|
||||
|
||||
class ProwlerArgumentParser:
|
||||
@@ -19,7 +29,6 @@ class ProwlerArgumentParser:
|
||||
epilog="""
|
||||
To see the different available options on a specific provider, run:
|
||||
prowler {provider} -h|--help
|
||||
|
||||
Detailed documentation at https://docs.prowler.cloud
|
||||
""",
|
||||
)
|
||||
@@ -28,7 +37,7 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
"-v",
|
||||
"--version",
|
||||
action="version",
|
||||
version=f"Prowler {prowler_version}",
|
||||
version=f"Prowler {prowler_version} {check_current_version(prowler_version)}",
|
||||
help="show Prowler version",
|
||||
)
|
||||
# Common arguments parser
|
||||
@@ -49,6 +58,7 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
# Init Providers Arguments
|
||||
self.__init_aws_parser__()
|
||||
self.__init_azure_parser__()
|
||||
self.__init_gcp_parser__()
|
||||
|
||||
def parse(self, args=None) -> argparse.Namespace:
|
||||
"""
|
||||
@@ -82,6 +92,10 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
"A provider is required to see its specific help options."
|
||||
)
|
||||
|
||||
# Only Logging Configuration
|
||||
if args.only_logs:
|
||||
args.no_banner = True
|
||||
|
||||
return args
|
||||
|
||||
def __set_default_provider__(self, args: list) -> list:
|
||||
@@ -101,7 +115,7 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
"-q",
|
||||
"--quiet",
|
||||
action="store_true",
|
||||
help="Show only Prowler failed findings",
|
||||
help="Store or send only Prowler failed findings",
|
||||
)
|
||||
common_outputs_parser.add_argument(
|
||||
"-M",
|
||||
@@ -130,7 +144,13 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
help="Display detailed information about findings",
|
||||
)
|
||||
common_outputs_parser.add_argument(
|
||||
"-b", "--no-banner", action="store_false", help="Hide Prowler banner"
|
||||
"-z",
|
||||
"--ignore-exit-code-3",
|
||||
action="store_true",
|
||||
help="Failed checks do not trigger exit code 3",
|
||||
)
|
||||
common_outputs_parser.add_argument(
|
||||
"-b", "--no-banner", action="store_true", help="Hide Prowler banner"
|
||||
)
|
||||
|
||||
def __init_logging_parser__(self):
|
||||
@@ -150,6 +170,11 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
nargs="?",
|
||||
help="Set log file name",
|
||||
)
|
||||
common_logging_parser.add_argument(
|
||||
"--only-logs",
|
||||
action="store_true",
|
||||
help="Print only Prowler logs by the stdout. This option sets --no-banner.",
|
||||
)
|
||||
|
||||
def __init_exclude_checks_parser__(self):
|
||||
# Exclude checks options
|
||||
@@ -192,7 +217,7 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
"--compliance",
|
||||
nargs="+",
|
||||
help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: ens_rd2022_aws)",
|
||||
choices=["ens_rd2022_aws", "cis_1.4_aws", "cis_1.5_aws"],
|
||||
choices=available_compliance_frameworks,
|
||||
)
|
||||
group.add_argument(
|
||||
"--categories",
|
||||
@@ -221,7 +246,7 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
"--list-compliance-requirements",
|
||||
nargs="+",
|
||||
help="List compliance requirements for a given requirement",
|
||||
choices=["ens_rd2022_aws", "cis_1.4_aws", "cis_1.5_aws"],
|
||||
choices=available_compliance_frameworks,
|
||||
)
|
||||
list_group.add_argument(
|
||||
"--list-categories",
|
||||
@@ -293,6 +318,11 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
action="store_true",
|
||||
help="Send check output to AWS Security Hub",
|
||||
)
|
||||
aws_security_hub_subparser.add_argument(
|
||||
"--skip-sh-update",
|
||||
action="store_true",
|
||||
help="Skip updating previous findings of Prowler in Security Hub",
|
||||
)
|
||||
# AWS Quick Inventory
|
||||
aws_quick_inventory_subparser = aws_parser.add_argument_group("Quick Inventory")
|
||||
aws_quick_inventory_subparser.add_argument(
|
||||
@@ -335,7 +365,35 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
"--allowlist-file",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="Path for allowlist yaml file. See example prowler/config/allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table ARN or S3 URI, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
|
||||
help="Path for allowlist yaml file. See example prowler/config/allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
|
||||
)
|
||||
# Based Scans
|
||||
aws_based_scans_subparser = aws_parser.add_argument_group("AWS Based Scans")
|
||||
aws_based_scans_parser = (
|
||||
aws_based_scans_subparser.add_mutually_exclusive_group()
|
||||
)
|
||||
aws_based_scans_parser.add_argument(
|
||||
"--resource-tags",
|
||||
nargs="+",
|
||||
default=None,
|
||||
help="Scan only resources with specific AWS Tags (Key=Value), e.g., Environment=dev Project=prowler",
|
||||
)
|
||||
aws_based_scans_parser.add_argument(
|
||||
"--resource-arn",
|
||||
nargs="+",
|
||||
type=arn_type,
|
||||
default=None,
|
||||
help="Scan only resources with specific AWS Resource ARNs, e.g., arn:aws:iam::012345678910:user/test arn:aws:ec2:us-east-1:123456789012:vpc/vpc-12345678",
|
||||
)
|
||||
|
||||
# Boto3 Config
|
||||
boto3_config_subparser = aws_parser.add_argument_group("Boto3 Config")
|
||||
boto3_config_subparser.add_argument(
|
||||
"--aws-retries-max-attempts",
|
||||
nargs="?",
|
||||
default=None,
|
||||
type=int,
|
||||
help="Set the maximum attemps for the Boto3 standard retrier config (Default: 3)",
|
||||
)
|
||||
|
||||
def __init_azure_parser__(self):
|
||||
@@ -374,3 +432,18 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
default=[],
|
||||
help="Azure subscription ids to be scanned by prowler",
|
||||
)
|
||||
|
||||
def __init_gcp_parser__(self):
|
||||
"""Init the GCP Provider CLI parser"""
|
||||
gcp_parser = self.subparsers.add_parser(
|
||||
"gcp", parents=[self.common_providers_parser], help="GCP Provider"
|
||||
)
|
||||
# Authentication Modes
|
||||
gcp_auth_subparser = gcp_parser.add_argument_group("Authentication Modes")
|
||||
gcp_auth_modes_group = gcp_auth_subparser.add_mutually_exclusive_group()
|
||||
gcp_auth_modes_group.add_argument(
|
||||
"--credentials-file",
|
||||
nargs="?",
|
||||
metavar="FILE_PATH",
|
||||
help="Authenticate using a Google Service Account Application Credentials JSON file",
|
||||
)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import logging
|
||||
from os import environ
|
||||
|
||||
# Logging levels
|
||||
logging_levels = {
|
||||
@@ -10,7 +11,7 @@ logging_levels = {
|
||||
}
|
||||
|
||||
|
||||
def set_logging_config(log_file: str = None, log_level: str = "ERROR"):
|
||||
def set_logging_config(log_level: str, log_file: str = None, only_logs: bool = False):
|
||||
# Logs formatter
|
||||
stream_formatter = logging.Formatter(
|
||||
"%(asctime)s [File: %(filename)s:%(lineno)d] \t[Module: %(module)s]\t %(levelname)s: %(message)s"
|
||||
@@ -22,9 +23,12 @@ def set_logging_config(log_file: str = None, log_level: str = "ERROR"):
|
||||
# Where to put logs
|
||||
logging_handlers = []
|
||||
|
||||
# Include stdout by default
|
||||
# Include stdout by default, if only_logs is set the log format is JSON
|
||||
stream_handler = logging.StreamHandler()
|
||||
stream_handler.setFormatter(stream_formatter)
|
||||
if only_logs:
|
||||
stream_handler.setFormatter(log_file_formatter)
|
||||
else:
|
||||
stream_handler.setFormatter(stream_formatter)
|
||||
logging_handlers.append(stream_handler)
|
||||
|
||||
# Log to file configuration
|
||||
@@ -35,6 +39,12 @@ def set_logging_config(log_file: str = None, log_level: str = "ERROR"):
|
||||
# Append the log formatter
|
||||
logging_handlers.append(log_file_handler)
|
||||
|
||||
# Set Log Level, environment takes precedence over the --log-level argument
|
||||
try:
|
||||
log_level = environ["LOG_LEVEL"]
|
||||
except KeyError:
|
||||
log_level = log_level
|
||||
|
||||
# Configure Logger
|
||||
# Initialize you log configuration using the base class
|
||||
# https://docs.python.org/3/library/logging.html#logrecord-attributes
|
||||
|
||||
@@ -4,112 +4,166 @@ from csv import DictWriter
|
||||
from colorama import Fore, Style
|
||||
from tabulate import tabulate
|
||||
|
||||
from prowler.config.config import timestamp
|
||||
from prowler.config.config import orange_color, timestamp
|
||||
from prowler.lib.check.models import Check_Report
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.models import (
|
||||
Check_Output_CSV_CIS,
|
||||
Check_Output_CSV_ENS_RD2022,
|
||||
Check_Output_CSV_Generic_Compliance,
|
||||
generate_csv_fields,
|
||||
)
|
||||
|
||||
|
||||
def fill_compliance(output_options, finding, audit_info, file_descriptors):
|
||||
# We have to retrieve all the check's compliance requirements
|
||||
check_compliance = output_options.bulk_checks_metadata[
|
||||
finding.check_metadata.CheckID
|
||||
].Compliance
|
||||
csv_header = compliance_row = None
|
||||
for compliance in check_compliance:
|
||||
if (
|
||||
compliance.Framework == "ENS"
|
||||
and compliance.Version == "RD2022"
|
||||
and "ens_rd2022_aws" in output_options.output_modes
|
||||
):
|
||||
for requirement in compliance.Requirements:
|
||||
requirement_description = requirement.Description
|
||||
requirement_id = requirement.Id
|
||||
for attribute in requirement.Attributes:
|
||||
compliance_row = Check_Output_CSV_ENS_RD2022(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
AccountId=audit_info.audited_account,
|
||||
Region=finding.region,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_IdGrupoControl=attribute.get(
|
||||
"IdGrupoControl"
|
||||
),
|
||||
Requirements_Attributes_Marco=attribute.get("Marco"),
|
||||
Requirements_Attributes_Categoria=attribute.get("Categoria"),
|
||||
Requirements_Attributes_DescripcionControl=attribute.get(
|
||||
"DescripcionControl"
|
||||
),
|
||||
Requirements_Attributes_Nivel=attribute.get("Nivel"),
|
||||
Requirements_Attributes_Tipo=attribute.get("Tipo"),
|
||||
Requirements_Attributes_Dimensiones=",".join(
|
||||
attribute.get("Dimensiones")
|
||||
),
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
|
||||
csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022)
|
||||
|
||||
elif compliance.Framework == "CIS-AWS" and "cis" in str(
|
||||
output_options.output_modes
|
||||
):
|
||||
for requirement in compliance.Requirements:
|
||||
requirement_description = requirement.Description
|
||||
requirement_id = requirement.Id
|
||||
for attribute in requirement.Attributes:
|
||||
compliance_row = Check_Output_CSV_CIS(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
AccountId=audit_info.audited_account,
|
||||
Region=finding.region,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_Section=attribute.get("Section"),
|
||||
Requirements_Attributes_Profile=attribute.get("Profile"),
|
||||
Requirements_Attributes_AssessmentStatus=attribute.get(
|
||||
"AssessmentStatus"
|
||||
),
|
||||
Requirements_Attributes_Description=attribute.get(
|
||||
"Description"
|
||||
),
|
||||
Requirements_Attributes_RationaleStatement=attribute.get(
|
||||
"RationaleStatement"
|
||||
),
|
||||
Requirements_Attributes_ImpactStatement=attribute.get(
|
||||
"ImpactStatement"
|
||||
),
|
||||
Requirements_Attributes_RemediationProcedure=attribute.get(
|
||||
"RemediationProcedure"
|
||||
),
|
||||
Requirements_Attributes_AuditProcedure=attribute.get(
|
||||
"AuditProcedure"
|
||||
),
|
||||
Requirements_Attributes_AdditionalInformation=attribute.get(
|
||||
"AdditionalInformation"
|
||||
),
|
||||
Requirements_Attributes_References=attribute.get("References"),
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
|
||||
csv_header = generate_csv_fields(Check_Output_CSV_CIS)
|
||||
|
||||
if compliance_row:
|
||||
csv_writer = DictWriter(
|
||||
file_descriptors[output_options.output_modes[-1]],
|
||||
fieldnames=csv_header,
|
||||
delimiter=";",
|
||||
def add_manual_controls(output_options, audit_info, file_descriptors):
|
||||
try:
|
||||
# Check if MANUAL control was already added to output
|
||||
if "manual_check" in output_options.bulk_checks_metadata:
|
||||
manual_finding = Check_Report(
|
||||
output_options.bulk_checks_metadata["manual_check"].json()
|
||||
)
|
||||
csv_writer.writerow(compliance_row.__dict__)
|
||||
manual_finding.status = "INFO"
|
||||
manual_finding.status_extended = "Manual check"
|
||||
manual_finding.resource_id = "manual_check"
|
||||
manual_finding.region = ""
|
||||
fill_compliance(
|
||||
output_options, manual_finding, audit_info, file_descriptors
|
||||
)
|
||||
del output_options.bulk_checks_metadata["manual_check"]
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
def fill_compliance(output_options, finding, audit_info, file_descriptors):
|
||||
try:
|
||||
# We have to retrieve all the check's compliance requirements
|
||||
check_compliance = output_options.bulk_checks_metadata[
|
||||
finding.check_metadata.CheckID
|
||||
].Compliance
|
||||
for compliance in check_compliance:
|
||||
csv_header = compliance_row = compliance_output = None
|
||||
if (
|
||||
compliance.Framework == "ENS"
|
||||
and compliance.Version == "RD2022"
|
||||
and "ens_rd2022_aws" in output_options.output_modes
|
||||
):
|
||||
compliance_output = "ens_rd2022_aws"
|
||||
for requirement in compliance.Requirements:
|
||||
requirement_description = requirement.Description
|
||||
requirement_id = requirement.Id
|
||||
for attribute in requirement.Attributes:
|
||||
compliance_row = Check_Output_CSV_ENS_RD2022(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
Description=compliance.Description,
|
||||
AccountId=audit_info.audited_account,
|
||||
Region=finding.region,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
|
||||
Requirements_Attributes_Marco=attribute.Marco,
|
||||
Requirements_Attributes_Categoria=attribute.Categoria,
|
||||
Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
|
||||
Requirements_Attributes_Nivel=attribute.Nivel,
|
||||
Requirements_Attributes_Tipo=attribute.Tipo,
|
||||
Requirements_Attributes_Dimensiones=",".join(
|
||||
attribute.Dimensiones
|
||||
),
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
|
||||
csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022)
|
||||
|
||||
elif compliance.Framework == "CIS" and "cis_" in str(
|
||||
output_options.output_modes
|
||||
):
|
||||
# Only with the version of CIS that was selected
|
||||
if "cis_" + compliance.Version + "_aws" in str(
|
||||
output_options.output_modes
|
||||
):
|
||||
compliance_output = "cis_" + compliance.Version + "_aws"
|
||||
for requirement in compliance.Requirements:
|
||||
requirement_description = requirement.Description
|
||||
requirement_id = requirement.Id
|
||||
for attribute in requirement.Attributes:
|
||||
compliance_row = Check_Output_CSV_CIS(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
Description=compliance.Description,
|
||||
AccountId=audit_info.audited_account,
|
||||
Region=finding.region,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_Section=attribute.Section,
|
||||
Requirements_Attributes_Profile=attribute.Profile,
|
||||
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
|
||||
Requirements_Attributes_Description=attribute.Description,
|
||||
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
|
||||
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
|
||||
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
|
||||
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
|
||||
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
|
||||
Requirements_Attributes_References=attribute.References,
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
|
||||
csv_header = generate_csv_fields(Check_Output_CSV_CIS)
|
||||
|
||||
else:
|
||||
compliance_output = compliance.Framework
|
||||
if compliance.Version != "":
|
||||
compliance_output += "_" + compliance.Version
|
||||
if compliance.Provider != "":
|
||||
compliance_output += "_" + compliance.Provider
|
||||
|
||||
compliance_output = compliance_output.lower().replace("-", "_")
|
||||
if compliance_output in output_options.output_modes:
|
||||
for requirement in compliance.Requirements:
|
||||
requirement_description = requirement.Description
|
||||
requirement_id = requirement.Id
|
||||
for attribute in requirement.Attributes:
|
||||
compliance_row = Check_Output_CSV_Generic_Compliance(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
Description=compliance.Description,
|
||||
AccountId=audit_info.audited_account,
|
||||
Region=finding.region,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_Section=attribute.Section,
|
||||
Requirements_Attributes_SubSection=attribute.SubSection,
|
||||
Requirements_Attributes_SubGroup=attribute.SubGroup,
|
||||
Requirements_Attributes_Service=attribute.Service,
|
||||
Requirements_Attributes_Soc_Type=attribute.Soc_Type,
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
|
||||
csv_header = generate_csv_fields(
|
||||
Check_Output_CSV_Generic_Compliance
|
||||
)
|
||||
|
||||
if compliance_row:
|
||||
csv_writer = DictWriter(
|
||||
file_descriptors[compliance_output],
|
||||
fieldnames=csv_header,
|
||||
delimiter=";",
|
||||
)
|
||||
csv_writer.writerow(compliance_row.__dict__)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
def display_compliance_table(
|
||||
@@ -120,16 +174,16 @@ def display_compliance_table(
|
||||
output_directory: str,
|
||||
):
|
||||
try:
|
||||
if "ens_rd2022_aws" in compliance_framework:
|
||||
if "ens_rd2022_aws" == compliance_framework:
|
||||
marcos = {}
|
||||
ens_compliance_table = {
|
||||
"Proveedor": [],
|
||||
"Marco/Categoria": [],
|
||||
"Estado": [],
|
||||
"PYTEC": [],
|
||||
"Alto": [],
|
||||
"Medio": [],
|
||||
"Bajo": [],
|
||||
"Opcional": [],
|
||||
}
|
||||
pass_count = fail_count = 0
|
||||
for finding in findings:
|
||||
@@ -147,13 +201,13 @@ def display_compliance_table(
|
||||
for requirement in compliance.Requirements:
|
||||
for attribute in requirement.Attributes:
|
||||
marco_categoria = (
|
||||
f"{attribute['Marco']}/{attribute['Categoria']}"
|
||||
f"{attribute.Marco}/{attribute.Categoria}"
|
||||
)
|
||||
# Check if Marco/Categoria exists
|
||||
if marco_categoria not in marcos:
|
||||
marcos[marco_categoria] = {
|
||||
"Estado": f"{Fore.GREEN}CUMPLE{Style.RESET_ALL}",
|
||||
"Pytec": 0,
|
||||
"Opcional": 0,
|
||||
"Alto": 0,
|
||||
"Medio": 0,
|
||||
"Bajo": 0,
|
||||
@@ -165,13 +219,13 @@ def display_compliance_table(
|
||||
] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}"
|
||||
elif finding.status == "PASS":
|
||||
pass_count += 1
|
||||
if attribute["Nivel"] == "pytec":
|
||||
marcos[marco_categoria]["Pytec"] += 1
|
||||
elif attribute["Nivel"] == "alto":
|
||||
if attribute.Nivel == "opcional":
|
||||
marcos[marco_categoria]["Opcional"] += 1
|
||||
elif attribute.Nivel == "alto":
|
||||
marcos[marco_categoria]["Alto"] += 1
|
||||
elif attribute["Nivel"] == "medio":
|
||||
elif attribute.Nivel == "medio":
|
||||
marcos[marco_categoria]["Medio"] += 1
|
||||
elif attribute["Nivel"] == "bajo":
|
||||
elif attribute.Nivel == "bajo":
|
||||
marcos[marco_categoria]["Bajo"] += 1
|
||||
|
||||
# Add results to table
|
||||
@@ -179,17 +233,17 @@ def display_compliance_table(
|
||||
ens_compliance_table["Proveedor"].append("aws")
|
||||
ens_compliance_table["Marco/Categoria"].append(marco)
|
||||
ens_compliance_table["Estado"].append(marcos[marco]["Estado"])
|
||||
ens_compliance_table["PYTEC"].append(
|
||||
f"{Fore.LIGHTRED_EX}{marcos[marco]['Pytec']}{Style.RESET_ALL}"
|
||||
ens_compliance_table["Opcional"].append(
|
||||
f"{Fore.BLUE}{marcos[marco]['Opcional']}{Style.RESET_ALL}"
|
||||
)
|
||||
ens_compliance_table["Alto"].append(
|
||||
f"{Fore.RED}{marcos[marco]['Alto']}{Style.RESET_ALL}"
|
||||
f"{Fore.LIGHTRED_EX}{marcos[marco]['Alto']}{Style.RESET_ALL}"
|
||||
)
|
||||
ens_compliance_table["Medio"].append(
|
||||
f"{Fore.YELLOW}{marcos[marco]['Medio']}{Style.RESET_ALL}"
|
||||
f"{orange_color}{marcos[marco]['Medio']}{Style.RESET_ALL}"
|
||||
)
|
||||
ens_compliance_table["Bajo"].append(
|
||||
f"{Fore.BLUE}{marcos[marco]['Bajo']}{Style.RESET_ALL}"
|
||||
f"{Fore.YELLOW}{marcos[marco]['Bajo']}{Style.RESET_ALL}"
|
||||
)
|
||||
if fail_count + pass_count < 0:
|
||||
print(
|
||||
@@ -217,11 +271,11 @@ def display_compliance_table(
|
||||
print(
|
||||
f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}"
|
||||
)
|
||||
print("\nResultados detallados en:")
|
||||
print(f"\nResultados detallados de {compliance_fm} en:")
|
||||
print(
|
||||
f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
|
||||
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
|
||||
)
|
||||
if "cis" in str(compliance_framework):
|
||||
elif "cis_1." in compliance_framework:
|
||||
sections = {}
|
||||
cis_compliance_table = {
|
||||
"Provider": [],
|
||||
@@ -234,14 +288,15 @@ def display_compliance_table(
|
||||
check = bulk_checks_metadata[finding.check_metadata.CheckID]
|
||||
check_compliances = check.Compliance
|
||||
for compliance in check_compliances:
|
||||
if compliance.Framework == "CIS-AWS" and compliance.Version in str(
|
||||
compliance_framework
|
||||
if (
|
||||
compliance.Framework == "CIS"
|
||||
and compliance.Version in compliance_framework
|
||||
):
|
||||
compliance_version = compliance.Version
|
||||
compliance_fm = compliance.Framework
|
||||
for requirement in compliance.Requirements:
|
||||
for attribute in requirement.Attributes:
|
||||
section = attribute["Section"]
|
||||
section = attribute.Section
|
||||
# Check if Section exists
|
||||
if section not in sections:
|
||||
sections[section] = {
|
||||
@@ -253,12 +308,12 @@ def display_compliance_table(
|
||||
fail_count += 1
|
||||
elif finding.status == "PASS":
|
||||
pass_count += 1
|
||||
if attribute["Profile"] == "Level 1":
|
||||
if attribute.Profile == "Level 1":
|
||||
if finding.status == "FAIL":
|
||||
sections[section]["Level 1"]["FAIL"] += 1
|
||||
else:
|
||||
sections[section]["Level 1"]["PASS"] += 1
|
||||
elif attribute["Profile"] == "Level 2":
|
||||
elif attribute.Profile == "Level 2":
|
||||
if finding.status == "FAIL":
|
||||
sections[section]["Level 2"]["FAIL"] += 1
|
||||
else:
|
||||
@@ -285,7 +340,7 @@ def display_compliance_table(
|
||||
cis_compliance_table["Level 2"].append(
|
||||
f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}"
|
||||
)
|
||||
if fail_count + pass_count < 0:
|
||||
if fail_count + pass_count < 1:
|
||||
print(
|
||||
f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL}.\n"
|
||||
)
|
||||
@@ -311,12 +366,17 @@ def display_compliance_table(
|
||||
print(
|
||||
f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}"
|
||||
)
|
||||
print("\nDetailed Results in:")
|
||||
print(f"\nDetailed results of {compliance_fm} are in:")
|
||||
print(
|
||||
f" - CSV: {output_directory}/{output_filename}_{compliance_framework[0]}.csv\n"
|
||||
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
|
||||
)
|
||||
else:
|
||||
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
|
||||
print(
|
||||
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
@@ -15,11 +15,14 @@ from prowler.lib.outputs.models import (
|
||||
Azure_Check_Output_CSV,
|
||||
Check_Output_CSV_CIS,
|
||||
Check_Output_CSV_ENS_RD2022,
|
||||
Check_Output_CSV_Generic_Compliance,
|
||||
Gcp_Check_Output_CSV,
|
||||
generate_csv_fields,
|
||||
)
|
||||
from prowler.lib.utils.utils import file_exists, open_file
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
|
||||
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
|
||||
|
||||
|
||||
def initialize_file_descriptor(
|
||||
@@ -41,18 +44,17 @@ def initialize_file_descriptor(
|
||||
"a",
|
||||
)
|
||||
|
||||
if output_mode in ("csv", "ens_rd2022_aws", "cis_1.5_aws", "cis_1.4_aws"):
|
||||
if output_mode in ("json", "json-asff"):
|
||||
file_descriptor.write("[")
|
||||
elif "html" in output_mode:
|
||||
add_html_header(file_descriptor, audit_info)
|
||||
else:
|
||||
# Format is the class model of the CSV format to print the headers
|
||||
csv_header = [x.upper() for x in generate_csv_fields(format)]
|
||||
csv_writer = DictWriter(
|
||||
file_descriptor, fieldnames=csv_header, delimiter=";"
|
||||
)
|
||||
csv_writer.writeheader()
|
||||
|
||||
if output_mode in ("json", "json-asff"):
|
||||
file_descriptor.write("[")
|
||||
if "html" in output_mode:
|
||||
add_html_header(file_descriptor, audit_info)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -82,17 +84,23 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
|
||||
audit_info,
|
||||
Azure_Check_Output_CSV,
|
||||
)
|
||||
if isinstance(audit_info, GCP_Audit_Info):
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename,
|
||||
output_mode,
|
||||
audit_info,
|
||||
Gcp_Check_Output_CSV,
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
if output_mode == "json":
|
||||
elif output_mode == "json":
|
||||
filename = f"{output_directory}/{output_filename}{json_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename, output_mode, audit_info
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
if isinstance(audit_info, AWS_Audit_Info):
|
||||
|
||||
elif isinstance(audit_info, AWS_Audit_Info):
|
||||
if output_mode == "json-asff":
|
||||
filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
@@ -100,7 +108,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
if output_mode == "html":
|
||||
elif output_mode == "html":
|
||||
filename = (
|
||||
f"{output_directory}/{output_filename}{html_file_suffix}"
|
||||
)
|
||||
@@ -109,7 +117,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
if output_mode == "ens_rd2022_aws":
|
||||
elif output_mode == "ens_rd2022_aws":
|
||||
filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename,
|
||||
@@ -119,19 +127,31 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
if output_mode == "cis_1.5_aws":
|
||||
elif output_mode == "cis_1.5_aws":
|
||||
filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename, output_mode, audit_info, Check_Output_CSV_CIS
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
if output_mode == "cis_1.4_aws":
|
||||
elif output_mode == "cis_1.4_aws":
|
||||
filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename, output_mode, audit_info, Check_Output_CSV_CIS
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
else:
|
||||
# Generic Compliance framework
|
||||
filename = f"{output_directory}/{output_filename}_{output_mode}{csv_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename,
|
||||
output_mode,
|
||||
audit_info,
|
||||
Check_Output_CSV_Generic_Compliance,
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import sys
|
||||
from os import path
|
||||
|
||||
from prowler.config.config import (
|
||||
html_file_suffix,
|
||||
@@ -8,6 +9,12 @@ from prowler.config.config import (
|
||||
timestamp,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.models import (
|
||||
get_check_compliance,
|
||||
parse_html_string,
|
||||
unroll_dict,
|
||||
unroll_tags,
|
||||
)
|
||||
from prowler.lib.utils.utils import open_file
|
||||
|
||||
|
||||
@@ -182,16 +189,16 @@ def add_html_header(file_descriptor, audit_info):
|
||||
<tr>
|
||||
<th scope="col">Status</th>
|
||||
<th scope="col">Severity</th>
|
||||
<th style="width:5%" scope="col">Service Name</th>
|
||||
<th scope="col">Service Name</th>
|
||||
<th scope="col">Region</th>
|
||||
<th style="width:20%" scope="col">Check ID</th>
|
||||
<th style="width:20%" scope="col">Check Title</th>
|
||||
<th scope="col">Resource ID</th>
|
||||
<th style="width:15%" scope="col">Check Description</th>
|
||||
<th scope="col">Check ID</th>
|
||||
<th scope="col">Resource Tags</th>
|
||||
<th scope="col">Status Extended</th>
|
||||
<th scope="col">Risk</th>
|
||||
<th scope="col">Recomendation</th>
|
||||
<th style="width:5%" scope="col">Recomendation URL</th>
|
||||
<th scope="col">Compliance</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
@@ -203,7 +210,7 @@ def add_html_header(file_descriptor, audit_info):
|
||||
)
|
||||
|
||||
|
||||
def fill_html(file_descriptor, finding):
|
||||
def fill_html(file_descriptor, finding, output_options):
|
||||
row_class = "p-3 mb-2 bg-success-custom"
|
||||
if finding.status == "INFO":
|
||||
row_class = "table-info"
|
||||
@@ -218,14 +225,14 @@ def fill_html(file_descriptor, finding):
|
||||
<td>{finding.check_metadata.Severity}</td>
|
||||
<td>{finding.check_metadata.ServiceName}</td>
|
||||
<td>{finding.region}</td>
|
||||
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
|
||||
<td>{finding.check_metadata.CheckTitle}</td>
|
||||
<td>{finding.resource_id.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td>{finding.check_metadata.Description}</td>
|
||||
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
|
||||
<td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>
|
||||
<td>{finding.status_extended.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p></td>
|
||||
<td><a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
|
||||
<td><p class="show-read-more">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>
|
||||
</tr>
|
||||
"""
|
||||
)
|
||||
@@ -235,40 +242,45 @@ def fill_html_overview_statistics(stats, output_filename, output_directory):
|
||||
try:
|
||||
filename = f"{output_directory}/{output_filename}{html_file_suffix}"
|
||||
# Read file
|
||||
with open(filename, "r") as file:
|
||||
filedata = file.read()
|
||||
if path.isfile(filename):
|
||||
with open(filename, "r") as file:
|
||||
filedata = file.read()
|
||||
|
||||
# Replace statistics
|
||||
# TOTAL_FINDINGS
|
||||
filedata = filedata.replace("TOTAL_FINDINGS", str(stats.get("findings_count")))
|
||||
# TOTAL_RESOURCES
|
||||
filedata = filedata.replace(
|
||||
"TOTAL_RESOURCES", str(stats.get("resources_count"))
|
||||
)
|
||||
# TOTAL_PASS
|
||||
filedata = filedata.replace("TOTAL_PASS", str(stats.get("total_pass")))
|
||||
# TOTAL_FAIL
|
||||
filedata = filedata.replace("TOTAL_FAIL", str(stats.get("total_fail")))
|
||||
# Write file
|
||||
with open(filename, "w") as file:
|
||||
file.write(filedata)
|
||||
# Replace statistics
|
||||
# TOTAL_FINDINGS
|
||||
filedata = filedata.replace(
|
||||
"TOTAL_FINDINGS", str(stats.get("findings_count"))
|
||||
)
|
||||
# TOTAL_RESOURCES
|
||||
filedata = filedata.replace(
|
||||
"TOTAL_RESOURCES", str(stats.get("resources_count"))
|
||||
)
|
||||
# TOTAL_PASS
|
||||
filedata = filedata.replace("TOTAL_PASS", str(stats.get("total_pass")))
|
||||
# TOTAL_FAIL
|
||||
filedata = filedata.replace("TOTAL_FAIL", str(stats.get("total_fail")))
|
||||
# Write file
|
||||
with open(filename, "w") as file:
|
||||
file.write(filedata)
|
||||
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def add_html_footer(output_filename, output_directory):
|
||||
try:
|
||||
filename = f"{output_directory}/{output_filename}{html_file_suffix}"
|
||||
file_descriptor = open_file(
|
||||
filename,
|
||||
"a",
|
||||
)
|
||||
file_descriptor.write(
|
||||
"""
|
||||
# Close HTML file if exists
|
||||
if path.isfile(filename):
|
||||
file_descriptor = open_file(
|
||||
filename,
|
||||
"a",
|
||||
)
|
||||
file_descriptor.write(
|
||||
"""
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
@@ -352,10 +364,10 @@ def add_html_footer(output_filename, output_directory):
|
||||
|
||||
</html>
|
||||
"""
|
||||
)
|
||||
file_descriptor.close()
|
||||
)
|
||||
file_descriptor.close()
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
@@ -8,18 +8,26 @@ from prowler.config.config import (
|
||||
timestamp_utc,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.models import Compliance, ProductFields, Resource, Severity
|
||||
from prowler.lib.outputs.models import (
|
||||
Compliance,
|
||||
ProductFields,
|
||||
Resource,
|
||||
Severity,
|
||||
get_check_compliance,
|
||||
)
|
||||
from prowler.lib.utils.utils import hash_sha512, open_file
|
||||
|
||||
|
||||
def fill_json_asff(finding_output, audit_info, finding):
|
||||
def fill_json_asff(finding_output, audit_info, finding, output_options):
|
||||
# Check if there are no resources in the finding
|
||||
if finding.resource_id == "":
|
||||
finding.resource_id = "NONE_PROVIDED"
|
||||
if finding.resource_arn == "":
|
||||
if finding.resource_id == "":
|
||||
finding.resource_id = "NONE_PROVIDED"
|
||||
finding.resource_arn = finding.resource_id
|
||||
finding_output.Id = f"prowler-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{hash_sha512(finding.resource_id)}"
|
||||
finding_output.ProductArn = f"arn:{audit_info.audited_partition}:securityhub:{finding.region}::product/prowler/prowler"
|
||||
finding_output.ProductFields = ProductFields(
|
||||
ProviderVersion=prowler_version, ProwlerResourceName=finding.resource_id
|
||||
ProviderVersion=prowler_version, ProwlerResourceName=finding.resource_arn
|
||||
)
|
||||
finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID
|
||||
finding_output.AwsAccountId = audit_info.audited_account
|
||||
@@ -29,23 +37,31 @@ def fill_json_asff(finding_output, audit_info, finding):
|
||||
) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
finding_output.Severity = Severity(Label=finding.check_metadata.Severity.upper())
|
||||
finding_output.Title = finding.check_metadata.CheckTitle
|
||||
finding_output.Description = finding.check_metadata.Description
|
||||
finding_output.Description = finding.status_extended
|
||||
finding_output.Resources = [
|
||||
Resource(
|
||||
Id=finding.resource_id,
|
||||
Id=finding.resource_arn,
|
||||
Type=finding.check_metadata.ResourceType,
|
||||
Partition=audit_info.audited_partition,
|
||||
Region=finding.region,
|
||||
)
|
||||
]
|
||||
# Check if any Requirement has > 64 characters
|
||||
check_types = []
|
||||
for type in finding.check_metadata.CheckType:
|
||||
check_types.extend(type.split("/"))
|
||||
# Iterate for each compliance framework
|
||||
compliance_summary = []
|
||||
associated_standards = []
|
||||
check_compliance = get_check_compliance(finding, "aws", output_options)
|
||||
for key, value in check_compliance.items():
|
||||
associated_standards.append({"StandardsId": key})
|
||||
item = f"{key} {' '.join(value)}"
|
||||
if len(item) > 64:
|
||||
item = item[0:63]
|
||||
compliance_summary.append(item)
|
||||
|
||||
# Add ED to PASS or FAIL (PASSED/FAILED)
|
||||
finding_output.Compliance = Compliance(
|
||||
Status=finding.status + "ED",
|
||||
RelatedRequirements=check_types,
|
||||
AssociatedStandards=associated_standards,
|
||||
RelatedRequirements=compliance_summary,
|
||||
)
|
||||
finding_output.Remediation = {
|
||||
"Recommendation": finding.check_metadata.Remediation.Recommendation
|
||||
@@ -55,23 +71,27 @@ def fill_json_asff(finding_output, audit_info, finding):
|
||||
|
||||
|
||||
def close_json(output_filename, output_directory, mode):
|
||||
"""close_json closes the output JSON file replacing the last comma with ]"""
|
||||
try:
|
||||
suffix = json_file_suffix
|
||||
if mode == "json-asff":
|
||||
suffix = json_asff_file_suffix
|
||||
filename = f"{output_directory}/{output_filename}{suffix}"
|
||||
file_descriptor = open_file(
|
||||
filename,
|
||||
"a",
|
||||
)
|
||||
# Replace last comma for square bracket if not empty
|
||||
if file_descriptor.tell() > 0:
|
||||
file_descriptor.seek(file_descriptor.tell() - 1, os.SEEK_SET)
|
||||
file_descriptor.truncate()
|
||||
file_descriptor.write("]")
|
||||
file_descriptor.close()
|
||||
# Close JSON file if exists
|
||||
if os.path.isfile(filename):
|
||||
file_descriptor = open_file(
|
||||
filename,
|
||||
"a",
|
||||
)
|
||||
# Replace last comma for square bracket if not empty
|
||||
if file_descriptor.tell() > 0:
|
||||
if file_descriptor.tell() != 1:
|
||||
file_descriptor.seek(file_descriptor.tell() - 1, os.SEEK_SET)
|
||||
file_descriptor.truncate()
|
||||
file_descriptor.write("]")
|
||||
file_descriptor.close()
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
@@ -11,7 +11,26 @@ from prowler.lib.logger import logger
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info
|
||||
|
||||
|
||||
def generate_provider_output_csv(provider: str, finding, audit_info, mode: str, fd):
|
||||
def get_check_compliance(finding, provider, output_options):
|
||||
check_compliance = {}
|
||||
# We have to retrieve all the check's compliance requirements
|
||||
for compliance in output_options.bulk_checks_metadata[
|
||||
finding.check_metadata.CheckID
|
||||
].Compliance:
|
||||
compliance_fw = compliance.Framework
|
||||
if compliance.Version:
|
||||
compliance_fw = f"{compliance_fw}-{compliance.Version}"
|
||||
if compliance.Provider == provider.upper():
|
||||
if compliance_fw not in check_compliance:
|
||||
check_compliance[compliance_fw] = []
|
||||
for requirement in compliance.Requirements:
|
||||
check_compliance[compliance_fw].append(requirement.Id)
|
||||
return check_compliance
|
||||
|
||||
|
||||
def generate_provider_output_csv(
|
||||
provider: str, finding, audit_info, mode: str, fd, output_options
|
||||
):
|
||||
"""
|
||||
set_provider_output_options configures automatically the outputs based on the selected provider and returns the Provider_Output_Options object.
|
||||
"""
|
||||
@@ -32,6 +51,22 @@ def generate_provider_output_csv(provider: str, finding, audit_info, mode: str,
|
||||
data[
|
||||
"finding_unique_id"
|
||||
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
|
||||
data["compliance"] = unroll_dict(
|
||||
get_check_compliance(finding, provider, output_options)
|
||||
)
|
||||
finding_output = output_model(**data)
|
||||
|
||||
if provider == "gcp":
|
||||
data["resource_id"] = finding.resource_id
|
||||
data["resource_name"] = finding.resource_name
|
||||
data["project_id"] = finding.project_id
|
||||
data["location"] = finding.location
|
||||
data[
|
||||
"finding_unique_id"
|
||||
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
|
||||
data["compliance"] = unroll_dict(
|
||||
get_check_compliance(finding, provider, output_options)
|
||||
)
|
||||
finding_output = output_model(**data)
|
||||
|
||||
if provider == "aws":
|
||||
@@ -43,6 +78,9 @@ def generate_provider_output_csv(provider: str, finding, audit_info, mode: str,
|
||||
data[
|
||||
"finding_unique_id"
|
||||
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
|
||||
data["compliance"] = unroll_dict(
|
||||
get_check_compliance(finding, provider, output_options)
|
||||
)
|
||||
finding_output = output_model(**data)
|
||||
|
||||
if audit_info.organizations_metadata:
|
||||
@@ -91,7 +129,7 @@ def fill_common_data_csv(finding: dict) -> dict:
|
||||
"severity": finding.check_metadata.Severity,
|
||||
"resource_type": finding.check_metadata.ResourceType,
|
||||
"resource_details": finding.resource_details,
|
||||
"resource_tags": finding.resource_tags,
|
||||
"resource_tags": unroll_tags(finding.resource_tags),
|
||||
"description": finding.check_metadata.Description,
|
||||
"risk": finding.check_metadata.Risk,
|
||||
"related_url": finding.check_metadata.RelatedUrl,
|
||||
@@ -113,26 +151,99 @@ def fill_common_data_csv(finding: dict) -> dict:
|
||||
"remediation_recommendation_code_other": (
|
||||
finding.check_metadata.Remediation.Code.Other
|
||||
),
|
||||
"categories": __unroll_list__(finding.check_metadata.Categories),
|
||||
"depends_on": __unroll_list__(finding.check_metadata.DependsOn),
|
||||
"related_to": __unroll_list__(finding.check_metadata.RelatedTo),
|
||||
"categories": unroll_list(finding.check_metadata.Categories),
|
||||
"depends_on": unroll_list(finding.check_metadata.DependsOn),
|
||||
"related_to": unroll_list(finding.check_metadata.RelatedTo),
|
||||
"notes": finding.check_metadata.Notes,
|
||||
}
|
||||
return data
|
||||
|
||||
|
||||
def __unroll_list__(listed_items: list):
|
||||
def unroll_list(listed_items: list):
|
||||
unrolled_items = ""
|
||||
separator = "|"
|
||||
for item in listed_items:
|
||||
if not unrolled_items:
|
||||
unrolled_items = f"{item}"
|
||||
else:
|
||||
unrolled_items = f"{unrolled_items}{separator}{item}"
|
||||
if listed_items:
|
||||
for item in listed_items:
|
||||
if not unrolled_items:
|
||||
unrolled_items = f"{item}"
|
||||
else:
|
||||
unrolled_items = f"{unrolled_items} {separator} {item}"
|
||||
|
||||
return unrolled_items
|
||||
|
||||
|
||||
def unroll_tags(tags: list):
|
||||
unrolled_items = ""
|
||||
separator = "|"
|
||||
if tags and tags != [{}] and tags != [None]:
|
||||
for item in tags:
|
||||
# Check if there are tags in list
|
||||
if type(item) == dict:
|
||||
for key, value in item.items():
|
||||
if not unrolled_items:
|
||||
# Check the pattern of tags (Key:Value or Key:key/Value:value)
|
||||
if "Key" != key and "Value" != key:
|
||||
unrolled_items = f"{key}={value}"
|
||||
else:
|
||||
if "Key" == key:
|
||||
unrolled_items = f"{value}="
|
||||
else:
|
||||
unrolled_items = f"{value}"
|
||||
else:
|
||||
if "Key" != key and "Value" != key:
|
||||
unrolled_items = (
|
||||
f"{unrolled_items} {separator} {key}={value}"
|
||||
)
|
||||
else:
|
||||
if "Key" == key:
|
||||
unrolled_items = (
|
||||
f"{unrolled_items} {separator} {value}="
|
||||
)
|
||||
else:
|
||||
unrolled_items = f"{unrolled_items}{value}"
|
||||
elif not unrolled_items:
|
||||
unrolled_items = f"{item}"
|
||||
else:
|
||||
unrolled_items = f"{unrolled_items} {separator} {item}"
|
||||
|
||||
return unrolled_items
|
||||
|
||||
|
||||
def unroll_dict(dict: dict):
|
||||
unrolled_items = ""
|
||||
separator = "|"
|
||||
for key, value in dict.items():
|
||||
if type(value) == list:
|
||||
value = ", ".join(value)
|
||||
if not unrolled_items:
|
||||
unrolled_items = f"{key}: {value}"
|
||||
else:
|
||||
unrolled_items = f"{unrolled_items} {separator} {key}: {value}"
|
||||
|
||||
return unrolled_items
|
||||
|
||||
|
||||
def parse_html_string(str: str):
|
||||
string = ""
|
||||
for elem in str.split(" | "):
|
||||
if elem:
|
||||
string += f"\n•{elem}\n"
|
||||
|
||||
return string
|
||||
|
||||
|
||||
def parse_json_tags(tags: list):
|
||||
dict_tags = {}
|
||||
if tags and tags != [{}] and tags != [None]:
|
||||
for tag in tags:
|
||||
if "Key" in tag and "Value" in tag:
|
||||
dict_tags[tag["Key"]] = tag["Value"]
|
||||
else:
|
||||
dict_tags.update(tag)
|
||||
|
||||
return dict_tags
|
||||
|
||||
|
||||
def generate_csv_fields(format: Any) -> list[str]:
|
||||
"""Generates the CSV headers for the given class"""
|
||||
csv_fields = []
|
||||
@@ -162,7 +273,7 @@ class Check_Output_CSV(BaseModel):
|
||||
severity: str
|
||||
resource_type: str
|
||||
resource_details: str
|
||||
resource_tags: list
|
||||
resource_tags: str
|
||||
description: str
|
||||
risk: str
|
||||
related_url: str
|
||||
@@ -172,6 +283,7 @@ class Check_Output_CSV(BaseModel):
|
||||
remediation_recommendation_code_terraform: str
|
||||
remediation_recommendation_code_cli: str
|
||||
remediation_recommendation_code_other: str
|
||||
compliance: str
|
||||
categories: str
|
||||
depends_on: str
|
||||
related_to: str
|
||||
@@ -206,7 +318,20 @@ class Azure_Check_Output_CSV(Check_Output_CSV):
|
||||
resource_name: str = ""
|
||||
|
||||
|
||||
def generate_provider_output_json(provider: str, finding, audit_info, mode: str, fd):
|
||||
class Gcp_Check_Output_CSV(Check_Output_CSV):
|
||||
"""
|
||||
Gcp_Check_Output_CSV generates a finding's output in CSV format for the GCP provider.
|
||||
"""
|
||||
|
||||
project_id: str = ""
|
||||
location: str = ""
|
||||
resource_id: str = ""
|
||||
resource_name: str = ""
|
||||
|
||||
|
||||
def generate_provider_output_json(
|
||||
provider: str, finding, audit_info, mode: str, output_options
|
||||
):
|
||||
"""
|
||||
generate_provider_output_json configures automatically the outputs based on the selected provider and returns the Check_Output_JSON object.
|
||||
"""
|
||||
@@ -228,6 +353,19 @@ def generate_provider_output_json(provider: str, finding, audit_info, mode: str,
|
||||
finding_output.ResourceId = finding.resource_id
|
||||
finding_output.ResourceName = finding.resource_name
|
||||
finding_output.FindingUniqueId = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
|
||||
finding_output.Compliance = get_check_compliance(
|
||||
finding, provider, output_options
|
||||
)
|
||||
|
||||
if provider == "gcp":
|
||||
finding_output.ProjectId = audit_info.project_id
|
||||
finding_output.Location = finding.location
|
||||
finding_output.ResourceId = finding.resource_id
|
||||
finding_output.ResourceName = finding.resource_name
|
||||
finding_output.FindingUniqueId = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
|
||||
finding_output.Compliance = get_check_compliance(
|
||||
finding, provider, output_options
|
||||
)
|
||||
|
||||
if provider == "aws":
|
||||
finding_output.Profile = audit_info.profile
|
||||
@@ -235,7 +373,11 @@ def generate_provider_output_json(provider: str, finding, audit_info, mode: str,
|
||||
finding_output.Region = finding.region
|
||||
finding_output.ResourceId = finding.resource_id
|
||||
finding_output.ResourceArn = finding.resource_arn
|
||||
finding_output.ResourceTags = parse_json_tags(finding.resource_tags)
|
||||
finding_output.FindingUniqueId = f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
|
||||
finding_output.Compliance = get_check_compliance(
|
||||
finding, provider, output_options
|
||||
)
|
||||
|
||||
if audit_info.organizations_metadata:
|
||||
finding_output.OrganizationsInfo = (
|
||||
@@ -246,7 +388,7 @@ def generate_provider_output_json(provider: str, finding, audit_info, mode: str,
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
else:
|
||||
return finding_output
|
||||
|
||||
@@ -271,11 +413,11 @@ class Check_Output_JSON(BaseModel):
|
||||
Severity: str
|
||||
ResourceType: str
|
||||
ResourceDetails: str = ""
|
||||
Tags: dict
|
||||
Description: str
|
||||
Risk: str
|
||||
RelatedUrl: str
|
||||
Remediation: Remediation
|
||||
Compliance: Optional[dict]
|
||||
Categories: List[str]
|
||||
DependsOn: List[str]
|
||||
RelatedTo: List[str]
|
||||
@@ -293,6 +435,7 @@ class Aws_Check_Output_JSON(Check_Output_JSON):
|
||||
Region: str = ""
|
||||
ResourceId: str = ""
|
||||
ResourceArn: str = ""
|
||||
ResourceTags: list = []
|
||||
|
||||
def __init__(self, **metadata):
|
||||
super().__init__(**metadata)
|
||||
@@ -300,7 +443,7 @@ class Aws_Check_Output_JSON(Check_Output_JSON):
|
||||
|
||||
class Azure_Check_Output_JSON(Check_Output_JSON):
|
||||
"""
|
||||
Aws_Check_Output_JSON generates a finding's output in JSON format for the AWS provider.
|
||||
Azure_Check_Output_JSON generates a finding's output in JSON format for the AWS provider.
|
||||
"""
|
||||
|
||||
Tenant_Domain: str = ""
|
||||
@@ -312,12 +455,27 @@ class Azure_Check_Output_JSON(Check_Output_JSON):
|
||||
super().__init__(**metadata)
|
||||
|
||||
|
||||
class Gcp_Check_Output_JSON(Check_Output_JSON):
|
||||
"""
|
||||
Gcp_Check_Output_JSON generates a finding's output in JSON format for the AWS provider.
|
||||
"""
|
||||
|
||||
ProjectId: str = ""
|
||||
ResourceId: str = ""
|
||||
ResourceName: str = ""
|
||||
Location: str = ""
|
||||
|
||||
def __init__(self, **metadata):
|
||||
super().__init__(**metadata)
|
||||
|
||||
|
||||
class Check_Output_CSV_ENS_RD2022(BaseModel):
|
||||
"""
|
||||
Check_Output_CSV_ENS_RD2022 generates a finding's output in CSV ENS RD2022 format.
|
||||
"""
|
||||
|
||||
Provider: str
|
||||
Description: str
|
||||
AccountId: str
|
||||
Region: str
|
||||
AssessmentDate: str
|
||||
@@ -338,10 +496,11 @@ class Check_Output_CSV_ENS_RD2022(BaseModel):
|
||||
|
||||
class Check_Output_CSV_CIS(BaseModel):
|
||||
"""
|
||||
Check_Output_CSV_ENS_RD2022 generates a finding's output in CSV CIS format.
|
||||
Check_Output_CSV_CIS generates a finding's output in CSV CIS format.
|
||||
"""
|
||||
|
||||
Provider: str
|
||||
Description: str
|
||||
AccountId: str
|
||||
Region: str
|
||||
AssessmentDate: str
|
||||
@@ -363,6 +522,29 @@ class Check_Output_CSV_CIS(BaseModel):
|
||||
CheckId: str
|
||||
|
||||
|
||||
class Check_Output_CSV_Generic_Compliance(BaseModel):
|
||||
"""
|
||||
Check_Output_CSV_Generic_Compliance generates a finding's output in CSV Generic Compliance format.
|
||||
"""
|
||||
|
||||
Provider: str
|
||||
Description: str
|
||||
AccountId: str
|
||||
Region: str
|
||||
AssessmentDate: str
|
||||
Requirements_Id: str
|
||||
Requirements_Description: str
|
||||
Requirements_Attributes_Section: Optional[str]
|
||||
Requirements_Attributes_SubSection: Optional[str]
|
||||
Requirements_Attributes_SubGroup: Optional[str]
|
||||
Requirements_Attributes_Service: str
|
||||
Requirements_Attributes_Soc_Type: Optional[str]
|
||||
Status: str
|
||||
StatusExtended: str
|
||||
ResourceId: str
|
||||
CheckId: str
|
||||
|
||||
|
||||
# JSON ASFF Output
|
||||
class ProductFields(BaseModel):
|
||||
ProviderName: str = "Prowler"
|
||||
@@ -384,6 +566,7 @@ class Resource(BaseModel):
|
||||
class Compliance(BaseModel):
|
||||
Status: str
|
||||
RelatedRequirements: List[str]
|
||||
AssociatedStandards: List[dict]
|
||||
|
||||
|
||||
class Check_Output_JSON_ASFF(BaseModel):
|
||||
|
||||
@@ -4,6 +4,7 @@ import sys
|
||||
from colorama import Fore, Style
|
||||
|
||||
from prowler.config.config import (
|
||||
available_compliance_frameworks,
|
||||
csv_file_suffix,
|
||||
html_file_suffix,
|
||||
json_asff_file_suffix,
|
||||
@@ -11,7 +12,7 @@ from prowler.config.config import (
|
||||
orange_color,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.compliance import fill_compliance
|
||||
from prowler.lib.outputs.compliance import add_manual_controls, fill_compliance
|
||||
from prowler.lib.outputs.file_descriptors import fill_file_descriptors
|
||||
from prowler.lib.outputs.html import fill_html
|
||||
from prowler.lib.outputs.json import fill_json_asff
|
||||
@@ -19,6 +20,7 @@ from prowler.lib.outputs.models import (
|
||||
Check_Output_JSON_ASFF,
|
||||
generate_provider_output_csv,
|
||||
generate_provider_output_json,
|
||||
unroll_tags,
|
||||
)
|
||||
from prowler.providers.aws.lib.allowlist.allowlist import is_allowlisted
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
@@ -31,12 +33,10 @@ def stdout_report(finding, color, verbose, is_quiet):
|
||||
details = finding.region
|
||||
if finding.check_metadata.Provider == "azure":
|
||||
details = finding.check_metadata.ServiceName
|
||||
if finding.check_metadata.Provider == "gcp":
|
||||
details = finding.location
|
||||
|
||||
if is_quiet and "FAIL" in finding.status:
|
||||
print(
|
||||
f"\t{color}{finding.status}{Style.RESET_ALL} {details}: {finding.status_extended}"
|
||||
)
|
||||
elif not is_quiet and verbose:
|
||||
if verbose and not (is_quiet and finding.status != "FAIL"):
|
||||
print(
|
||||
f"\t{color}{finding.status}{Style.RESET_ALL} {details}: {finding.status_extended}"
|
||||
)
|
||||
@@ -73,6 +73,7 @@ def report(check_findings, output_options, audit_info):
|
||||
finding.check_metadata.CheckID,
|
||||
finding.region,
|
||||
finding.resource_id,
|
||||
unroll_tags(finding.resource_tags),
|
||||
):
|
||||
finding.status = "WARNING"
|
||||
# Print findings by stdout
|
||||
@@ -82,76 +83,92 @@ def report(check_findings, output_options, audit_info):
|
||||
)
|
||||
|
||||
if file_descriptors:
|
||||
# AWS specific outputs
|
||||
if finding.check_metadata.Provider == "aws":
|
||||
if (
|
||||
"ens_rd2022_aws" in output_options.output_modes
|
||||
or "cis" in str(output_options.output_modes)
|
||||
):
|
||||
fill_compliance(
|
||||
output_options, finding, audit_info, file_descriptors
|
||||
# Check if --quiet to only add fails to outputs
|
||||
if not (finding.status != "FAIL" and output_options.is_quiet):
|
||||
# AWS specific outputs
|
||||
if finding.check_metadata.Provider == "aws":
|
||||
if any(
|
||||
compliance in output_options.output_modes
|
||||
for compliance in available_compliance_frameworks
|
||||
):
|
||||
fill_compliance(
|
||||
output_options,
|
||||
finding,
|
||||
audit_info,
|
||||
file_descriptors,
|
||||
)
|
||||
|
||||
add_manual_controls(
|
||||
output_options,
|
||||
audit_info,
|
||||
file_descriptors,
|
||||
)
|
||||
|
||||
if "html" in file_descriptors:
|
||||
fill_html(
|
||||
file_descriptors["html"], finding, output_options
|
||||
)
|
||||
file_descriptors["html"].write("")
|
||||
|
||||
if "json-asff" in file_descriptors:
|
||||
finding_output = Check_Output_JSON_ASFF()
|
||||
fill_json_asff(
|
||||
finding_output, audit_info, finding, output_options
|
||||
)
|
||||
|
||||
json.dump(
|
||||
finding_output.dict(),
|
||||
file_descriptors["json-asff"],
|
||||
indent=4,
|
||||
)
|
||||
file_descriptors["json-asff"].write(",")
|
||||
|
||||
# Check if it is needed to send findings to security hub
|
||||
if (
|
||||
output_options.security_hub_enabled
|
||||
and finding.status != "INFO"
|
||||
):
|
||||
send_to_security_hub(
|
||||
output_options.is_quiet,
|
||||
finding.status,
|
||||
finding.region,
|
||||
finding_output,
|
||||
audit_info.audit_session,
|
||||
)
|
||||
|
||||
# Common outputs
|
||||
if "csv" in file_descriptors:
|
||||
csv_writer, finding_output = generate_provider_output_csv(
|
||||
finding.check_metadata.Provider,
|
||||
finding,
|
||||
audit_info,
|
||||
"csv",
|
||||
file_descriptors["csv"],
|
||||
output_options,
|
||||
)
|
||||
csv_writer.writerow(finding_output.__dict__)
|
||||
|
||||
if "html" in file_descriptors:
|
||||
fill_html(file_descriptors["html"], finding)
|
||||
file_descriptors["html"].write("")
|
||||
|
||||
if "json-asff" in file_descriptors:
|
||||
finding_output = Check_Output_JSON_ASFF()
|
||||
fill_json_asff(finding_output, audit_info, finding)
|
||||
|
||||
if "json" in file_descriptors:
|
||||
finding_output = generate_provider_output_json(
|
||||
finding.check_metadata.Provider,
|
||||
finding,
|
||||
audit_info,
|
||||
"json",
|
||||
output_options,
|
||||
)
|
||||
json.dump(
|
||||
finding_output.dict(),
|
||||
file_descriptors["json-asff"],
|
||||
file_descriptors["json"],
|
||||
indent=4,
|
||||
)
|
||||
file_descriptors["json-asff"].write(",")
|
||||
|
||||
# Check if it is needed to send findings to security hub
|
||||
if (
|
||||
output_options.security_hub_enabled
|
||||
and finding.status != "INFO"
|
||||
):
|
||||
send_to_security_hub(
|
||||
output_options.is_quiet,
|
||||
finding.status,
|
||||
finding.region,
|
||||
finding_output,
|
||||
audit_info.audit_session,
|
||||
)
|
||||
|
||||
# Common outputs
|
||||
if "csv" in file_descriptors:
|
||||
csv_writer, finding_output = generate_provider_output_csv(
|
||||
finding.check_metadata.Provider,
|
||||
finding,
|
||||
audit_info,
|
||||
"csv",
|
||||
file_descriptors["csv"],
|
||||
)
|
||||
csv_writer.writerow(finding_output.__dict__)
|
||||
|
||||
if "json" in file_descriptors:
|
||||
finding_output = generate_provider_output_json(
|
||||
finding.check_metadata.Provider,
|
||||
finding,
|
||||
audit_info,
|
||||
"json",
|
||||
file_descriptors["json"],
|
||||
)
|
||||
json.dump(
|
||||
finding_output.dict(),
|
||||
file_descriptors["json"],
|
||||
indent=4,
|
||||
)
|
||||
file_descriptors["json"].write(",")
|
||||
file_descriptors["json"].write(",")
|
||||
|
||||
else: # No service resources in the whole account
|
||||
color = set_report_color("INFO")
|
||||
if not output_options.is_quiet and output_options.verbose:
|
||||
if output_options.verbose:
|
||||
print(f"\t{color}INFO{Style.RESET_ALL} There are no resources")
|
||||
# Separator between findings and bar
|
||||
if output_options.is_quiet or output_options.verbose:
|
||||
if output_options.verbose:
|
||||
print()
|
||||
if file_descriptors:
|
||||
# Close all file descriptors
|
||||
@@ -195,8 +212,12 @@ def send_to_s3_bucket(
|
||||
filename = f"{output_filename}{json_asff_file_suffix}"
|
||||
elif output_mode == "html":
|
||||
filename = f"{output_filename}{html_file_suffix}"
|
||||
else: # Compliance output mode
|
||||
filename = f"{output_filename}_{output_mode}{csv_file_suffix}"
|
||||
logger.info(f"Sending outputs to S3 bucket {output_bucket}")
|
||||
bucket_remote_dir = output_directory.split("/")[-1]
|
||||
bucket_remote_dir = output_directory
|
||||
while "prowler/" in bucket_remote_dir: # Check if it is not a custom directory
|
||||
bucket_remote_dir = bucket_remote_dir.partition("prowler/")[-1]
|
||||
file_name = output_directory + "/" + filename
|
||||
bucket_name = output_bucket
|
||||
object_name = bucket_remote_dir + "/" + output_mode + "/" + filename
|
||||
@@ -207,7 +228,7 @@ def send_to_s3_bucket(
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def extract_findings_statistics(findings: list) -> dict:
|
||||
|
||||
@@ -26,6 +26,9 @@ def display_summary_table(
|
||||
else:
|
||||
entity_type = "Tenant ID/s"
|
||||
audited_entities = " ".join(audit_info.identity.tenant_ids)
|
||||
elif provider == "gcp":
|
||||
entity_type = "Project ID"
|
||||
audited_entities = audit_info.project_id
|
||||
|
||||
if findings:
|
||||
current = {
|
||||
@@ -53,7 +56,6 @@ def display_summary_table(
|
||||
current["Service"] != finding.check_metadata.ServiceName
|
||||
and current["Service"]
|
||||
):
|
||||
|
||||
add_service_to_table(findings_table, current)
|
||||
|
||||
current["Total"] = current["Critical"] = current["High"] = current[
|
||||
@@ -120,7 +122,7 @@ def display_summary_table(
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def add_service_to_table(findings_table, current):
|
||||
|
||||
17
prowler/lib/scan_filters/scan_filters.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
|
||||
def is_resource_filtered(resource: str, audit_resources: list) -> bool:
|
||||
"""
|
||||
Check if the resource passed as argument is present in the audit_resources.
|
||||
|
||||
Returns True if it is filtered and False if it does not match the input filters
|
||||
"""
|
||||
try:
|
||||
if resource in str(audit_resources):
|
||||
return True
|
||||
return False
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error} ({resource})"
|
||||
)
|
||||
@@ -1,21 +1,31 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
from hashlib import sha512
|
||||
from io import TextIOWrapper
|
||||
from os.path import exists
|
||||
from typing import Any
|
||||
|
||||
from detect_secrets import SecretsCollection
|
||||
from detect_secrets.settings import default_settings
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
|
||||
def open_file(input_file: str, mode: str = "r") -> TextIOWrapper:
|
||||
try:
|
||||
f = open(input_file, mode)
|
||||
except OSError:
|
||||
logger.critical(
|
||||
"Ooops! You reached your user session maximum open files. To solve this issue, increase the shell session limit by running this command `ulimit -n 4096`. For more info visit https://docs.prowler.cloud/en/latest/troubleshooting/"
|
||||
)
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.critical(
|
||||
f"{input_file}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
else:
|
||||
return f
|
||||
|
||||
@@ -28,7 +38,7 @@ def parse_json_file(input_file: TextIOWrapper) -> Any:
|
||||
logger.critical(
|
||||
f"{input_file.name}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
else:
|
||||
return json_file
|
||||
|
||||
@@ -41,7 +51,7 @@ def file_exists(filename: str):
|
||||
logger.critical(
|
||||
f"{exists_filename.name}: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
else:
|
||||
return exists_filename
|
||||
|
||||
@@ -49,3 +59,20 @@ def file_exists(filename: str):
|
||||
# create sha512 hash for string
|
||||
def hash_sha512(string: str) -> str:
|
||||
return sha512(string.encode("utf-8")).hexdigest()[0:9]
|
||||
|
||||
|
||||
def detect_secrets_scan(data):
|
||||
temp_data_file = tempfile.NamedTemporaryFile(delete=False)
|
||||
temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
|
||||
temp_data_file.close()
|
||||
|
||||
secrets = SecretsCollection()
|
||||
with default_settings():
|
||||
secrets.scan_file(temp_data_file.name)
|
||||
os.remove(temp_data_file.name)
|
||||
|
||||
detect_secrets_output = secrets.json()
|
||||
if detect_secrets_output:
|
||||
return detect_secrets_output[temp_data_file.name]
|
||||
else:
|
||||
return None
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
from boto3 import session
|
||||
@@ -6,6 +7,7 @@ from botocore.credentials import RefreshableCredentials
|
||||
from botocore.session import get_session
|
||||
|
||||
from prowler.config.config import aws_services_json_file
|
||||
from prowler.lib.check.check import list_modules, recover_checks_from_service
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.utils.utils import open_file, parse_json_file
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
|
||||
@@ -53,7 +55,7 @@ class AWS_Provider:
|
||||
return session.Session(profile_name=audit_info.profile)
|
||||
except Exception as error:
|
||||
logger.critical(f"{error.__class__.__name__} -- {error}")
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
# Refresh credentials method using assume role
|
||||
# This method is called "adding ()" to the name, so it cannot accept arguments
|
||||
@@ -83,7 +85,7 @@ def assume_role(session: session.Session, assumed_role_info: AWS_Assume_Role) ->
|
||||
if assumed_role_info.external_id:
|
||||
assumed_credentials = sts_client.assume_role(
|
||||
RoleArn=assumed_role_info.role_arn,
|
||||
RoleSessionName="ProwlerProAsessmentSession",
|
||||
RoleSessionName="ProwlerAsessmentSession",
|
||||
DurationSeconds=assumed_role_info.session_duration,
|
||||
ExternalId=assumed_role_info.external_id,
|
||||
)
|
||||
@@ -96,7 +98,7 @@ def assume_role(session: session.Session, assumed_role_info: AWS_Assume_Role) ->
|
||||
)
|
||||
except Exception as error:
|
||||
logger.critical(f"{error.__class__.__name__} -- {error}")
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
else:
|
||||
return assumed_credentials
|
||||
@@ -108,9 +110,9 @@ def generate_regional_clients(
|
||||
try:
|
||||
regional_clients = {}
|
||||
# Get json locally
|
||||
actual_directory = os.path.dirname(os.path.realpath(__file__))
|
||||
f = open_file(f"{actual_directory}/{aws_services_json_file}")
|
||||
data = parse_json_file(f)
|
||||
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
|
||||
with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
|
||||
data = parse_json_file(f)
|
||||
# Check if it is a subservice
|
||||
json_regions = data["services"][service]["regions"][
|
||||
audit_info.audited_partition
|
||||
@@ -129,7 +131,7 @@ def generate_regional_clients(
|
||||
regions = regions[:1]
|
||||
for region in regions:
|
||||
regional_client = audit_info.audit_session.client(
|
||||
service, region_name=region
|
||||
service, region_name=region, config=audit_info.session_config
|
||||
)
|
||||
regional_client.region = region
|
||||
regional_clients[region] = regional_client
|
||||
@@ -138,3 +140,90 @@ def generate_regional_clients(
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
def get_aws_available_regions():
|
||||
try:
|
||||
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
|
||||
with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
|
||||
data = parse_json_file(f)
|
||||
|
||||
regions = set()
|
||||
for service in data["services"].values():
|
||||
for partition in service["regions"]:
|
||||
for item in service["regions"][partition]:
|
||||
regions.add(item)
|
||||
return list(regions)
|
||||
except Exception as error:
|
||||
logger.error(f"{error.__class__.__name__}: {error}")
|
||||
return []
|
||||
|
||||
|
||||
def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
|
||||
"""get_checks_from_input_arn gets the list of checks from the input arns"""
|
||||
checks_from_arn = set()
|
||||
# Handle if there are audit resources so only their services are executed
|
||||
if audit_resources:
|
||||
services_without_subservices = ["guardduty", "kms", "s3", "elb"]
|
||||
service_list = set()
|
||||
sub_service_list = set()
|
||||
for resource in audit_resources:
|
||||
service = resource.split(":")[2]
|
||||
sub_service = resource.split(":")[5].split("/")[0].replace("-", "_")
|
||||
# WAF Services does not have checks
|
||||
if service != "wafv2" and service != "waf":
|
||||
# Parse services when they are different in the ARNs
|
||||
if service == "lambda":
|
||||
service = "awslambda"
|
||||
if service == "elasticloadbalancing":
|
||||
service = "elb"
|
||||
elif service == "logs":
|
||||
service = "cloudwatch"
|
||||
# Check if Prowler has checks in service
|
||||
try:
|
||||
list_modules(provider, service)
|
||||
except ModuleNotFoundError:
|
||||
# Service is not supported
|
||||
pass
|
||||
else:
|
||||
service_list.add(service)
|
||||
|
||||
# Get subservices to execute only applicable checks
|
||||
if service not in services_without_subservices:
|
||||
# Parse some specific subservices
|
||||
if service == "ec2":
|
||||
if sub_service == "security_group":
|
||||
sub_service = "securitygroup"
|
||||
if sub_service == "network_acl":
|
||||
sub_service = "networkacl"
|
||||
if sub_service == "image":
|
||||
sub_service = "ami"
|
||||
if service == "rds":
|
||||
if sub_service == "cluster_snapshot":
|
||||
sub_service = "snapshot"
|
||||
sub_service_list.add(sub_service)
|
||||
else:
|
||||
sub_service_list.add(service)
|
||||
|
||||
checks = recover_checks_from_service(service_list, provider)
|
||||
|
||||
# Filter only checks with audited subservices
|
||||
for check in checks:
|
||||
if any(sub_service in check for sub_service in sub_service_list):
|
||||
if not (sub_service == "policy" and "password_policy" in check):
|
||||
checks_from_arn.add(check)
|
||||
|
||||
# Return final checks list
|
||||
return sorted(checks_from_arn)
|
||||
|
||||
|
||||
def get_regions_from_audit_resources(audit_resources: list) -> list:
|
||||
"""get_regions_from_audit_resources gets the regions from the audit resources arns"""
|
||||
audited_regions = []
|
||||
for resource in audit_resources:
|
||||
region = resource.split(":")[3]
|
||||
if region and region not in audited_regions: # Check if arn has a region
|
||||
audited_regions.append(region)
|
||||
if audited_regions:
|
||||
return audited_regions
|
||||
return None
|
||||
|
||||
@@ -3,9 +3,22 @@ import sys
|
||||
|
||||
import yaml
|
||||
from boto3.dynamodb.conditions import Attr
|
||||
from schema import Optional, Schema
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
allowlist_schema = Schema(
|
||||
{
|
||||
"Accounts": {
|
||||
str: {
|
||||
"Checks": {
|
||||
str: {"Regions": list, "Resources": list, Optional("Tags"): list}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def parse_allowlist_file(audit_info, allowlist_file):
|
||||
try:
|
||||
@@ -17,6 +30,17 @@ def parse_allowlist_file(audit_info, allowlist_file):
|
||||
allowlist = yaml.safe_load(
|
||||
s3_client.get_object(Bucket=bucket, Key=key)["Body"]
|
||||
)["Allowlist"]
|
||||
# Check if file is a Lambda Function ARN
|
||||
elif re.search(r"^arn:(\w+):lambda:", allowlist_file):
|
||||
lambda_region = allowlist_file.split(":")[3]
|
||||
lambda_client = audit_info.audit_session.client(
|
||||
"lambda", region_name=lambda_region
|
||||
)
|
||||
lambda_response = lambda_client.invoke(
|
||||
FunctionName=allowlist_file, InvocationType="RequestResponse"
|
||||
)
|
||||
lambda_payload = lambda_response["Payload"].read()
|
||||
allowlist = yaml.safe_load(lambda_payload)["Allowlist"]
|
||||
# Check if file is a DynamoDB ARN
|
||||
elif re.search(
|
||||
r"^arn:aws(-cn|-us-gov)?:dynamodb:[a-z]{2}-[a-z-]+-[1-9]{1}:[0-9]{12}:table\/[a-zA-Z0-9._-]+$",
|
||||
@@ -45,35 +69,55 @@ def parse_allowlist_file(audit_info, allowlist_file):
|
||||
dynamodb_items.update(response["Items"])
|
||||
for item in dynamodb_items:
|
||||
# Create allowlist for every item
|
||||
allowlist["Accounts"][item["Accounts"]] = {
|
||||
"Checks": {
|
||||
item["Checks"]: {
|
||||
"Regions": item["Regions"],
|
||||
"Resources": item["Resources"],
|
||||
if "Tags" in item:
|
||||
allowlist["Accounts"][item["Accounts"]] = {
|
||||
"Checks": {
|
||||
item["Checks"]: {
|
||||
"Regions": item["Regions"],
|
||||
"Resources": item["Resources"],
|
||||
"Tags": item["Tags"],
|
||||
}
|
||||
}
|
||||
}
|
||||
else:
|
||||
allowlist["Accounts"][item["Accounts"]] = {
|
||||
"Checks": {
|
||||
item["Checks"]: {
|
||||
"Regions": item["Regions"],
|
||||
"Resources": item["Resources"],
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else:
|
||||
with open(allowlist_file) as f:
|
||||
allowlist = yaml.safe_load(f)["Allowlist"]
|
||||
try:
|
||||
allowlist_schema.validate(allowlist)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- Allowlist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
return allowlist
|
||||
except Exception as error:
|
||||
logger.critical(f"{error.__class__.__name__} -- {error}")
|
||||
sys.exit()
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def is_allowlisted(allowlist, audited_account, check, region, resource):
|
||||
def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
|
||||
try:
|
||||
if audited_account in allowlist["Accounts"]:
|
||||
if is_allowlisted_in_check(
|
||||
allowlist, audited_account, check, region, resource
|
||||
allowlist, audited_account, check, region, resource, tags
|
||||
):
|
||||
return True
|
||||
# If there is a *, it affects to all accounts
|
||||
if "*" in allowlist["Accounts"]:
|
||||
audited_account = "*"
|
||||
if is_allowlisted_in_check(
|
||||
allowlist, audited_account, check, region, resource
|
||||
allowlist, audited_account, check, region, resource, tags
|
||||
):
|
||||
return True
|
||||
return False
|
||||
@@ -81,22 +125,22 @@ def is_allowlisted(allowlist, audited_account, check, region, resource):
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def is_allowlisted_in_check(allowlist, audited_account, check, region, resource):
|
||||
def is_allowlisted_in_check(allowlist, audited_account, check, region, resource, tags):
|
||||
try:
|
||||
# If there is a *, it affects to all checks
|
||||
if "*" in allowlist["Accounts"][audited_account]["Checks"]:
|
||||
check = "*"
|
||||
if is_allowlisted_in_region(
|
||||
allowlist, audited_account, check, region, resource
|
||||
allowlist, audited_account, check, region, resource, tags
|
||||
):
|
||||
return True
|
||||
# Check if there is the specific check
|
||||
if check in allowlist["Accounts"][audited_account]["Checks"]:
|
||||
if is_allowlisted_in_region(
|
||||
allowlist, audited_account, check, region, resource
|
||||
allowlist, audited_account, check, region, resource, tags
|
||||
):
|
||||
return True
|
||||
return False
|
||||
@@ -104,27 +148,62 @@ def is_allowlisted_in_check(allowlist, audited_account, check, region, resource)
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def is_allowlisted_in_region(allowlist, audited_account, check, region, resource):
|
||||
def is_allowlisted_in_region(allowlist, audited_account, check, region, resource, tags):
|
||||
try:
|
||||
# If there is a *, it affects to all regions
|
||||
if "*" in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
|
||||
for elem in allowlist["Accounts"][audited_account]["Checks"][check][
|
||||
"Resources"
|
||||
]:
|
||||
if re.search(re.escape(elem), resource):
|
||||
if is_allowlisted_in_tags(
|
||||
allowlist["Accounts"][audited_account]["Checks"][check],
|
||||
elem,
|
||||
resource,
|
||||
tags,
|
||||
):
|
||||
return True
|
||||
# Check if there is the specific region
|
||||
if region in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
|
||||
for elem in allowlist["Accounts"][audited_account]["Checks"][check][
|
||||
"Resources"
|
||||
]:
|
||||
if re.search(elem, resource):
|
||||
if is_allowlisted_in_tags(
|
||||
allowlist["Accounts"][audited_account]["Checks"][check],
|
||||
elem,
|
||||
resource,
|
||||
tags,
|
||||
):
|
||||
return True
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def is_allowlisted_in_tags(check_allowlist, elem, resource, tags):
|
||||
try:
|
||||
# Check if it is an *
|
||||
if elem == "*":
|
||||
elem = ".*"
|
||||
# Check if there are allowlisted tags
|
||||
if "Tags" in check_allowlist:
|
||||
# Check if there are resource tags
|
||||
if tags:
|
||||
tags_in_resource_tags = True
|
||||
for tag in check_allowlist["Tags"]:
|
||||
if tag not in tags:
|
||||
tags_in_resource_tags = False
|
||||
if tags_in_resource_tags and re.search(elem, resource):
|
||||
return True
|
||||
else:
|
||||
if re.search(elem, resource):
|
||||
return True
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import re
|
||||
|
||||
from arnparse import arnparse
|
||||
|
||||
from prowler.providers.aws.lib.arn.error import (
|
||||
@@ -43,3 +45,9 @@ def arn_parsing(arn):
|
||||
raise RoleArnParsingEmptyResource
|
||||
else:
|
||||
return arn_parsed
|
||||
|
||||
|
||||
def is_valid_arn(arn: str) -> bool:
|
||||
"""is_valid_arn returns True or False whether the given AWS ARN (Amazon Resource Name) is valid or not."""
|
||||
regex = r"^arn:aws(-cn|-us-gov)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/]+(:\d+)?$"
|
||||
return re.match(regex, arn) is not None
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from boto3 import session
|
||||
from botocore.config import Config
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
|
||||
|
||||
@@ -9,6 +10,9 @@ current_audit_info = AWS_Audit_Info(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
# Default standard retrier config
|
||||
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
|
||||
session_config=Config(retries={"max_attempts": 3, "mode": "standard"}),
|
||||
audited_account=None,
|
||||
audited_user_id=None,
|
||||
audited_partition=None,
|
||||
@@ -21,6 +25,8 @@ current_audit_info = AWS_Audit_Info(
|
||||
session_duration=None,
|
||||
external_id=None,
|
||||
),
|
||||
audit_resources=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_metadata=None,
|
||||
)
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
from boto3 import session
|
||||
from botocore.config import Config
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -32,6 +34,8 @@ class AWS_Organizations_Info:
|
||||
class AWS_Audit_Info:
|
||||
original_session: session.Session
|
||||
audit_session: session.Session
|
||||
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
|
||||
session_config: Config
|
||||
audited_account: int
|
||||
audited_identity_arn: str
|
||||
audited_user_id: str
|
||||
@@ -41,4 +45,6 @@ class AWS_Audit_Info:
|
||||
credentials: AWS_Credentials
|
||||
assumed_role_info: AWS_Assume_Role
|
||||
audited_regions: list
|
||||
audit_resources: list
|
||||
organizations_metadata: AWS_Organizations_Info
|
||||
audit_metadata: Optional[Any] = None
|
||||
|
||||