Compare commits: review_met...3.15.0
418 commits
Commits in this comparison (SHA1):

e315bc10ba 66a47190f8 8b8e1e2ca3 41373caad4 f14f8e7ec5 65edaeae8f e6efb2b716 ed60958014
85f45951e0 542386a3aa 98f6564245 2319edf566 6a2171fa08 4857ff46f2 4b6eed0a66 0975c329c0
5c3e3dbda3 8242a882a3 6d825ced70 8e58a2be1f 33884dbee5 c5dafcce43 97e59b2a25 75bcbd498d
ee16a8ae1a fcb2df93b8 ddd43bae5d 00ab5b5fc2 81b2ee4d06 3eeca73e50 7c939ff1e2 905ffc7540
b6d593c342 81b82cae0d 98dea32288 f20319550c e4cfdb19ec 60ed9d08d3 f91ccedc83 b4a05f4be0
d431877114 15b501d702 fea144a654 2ec4d59e29 06f988b8e5 eed4821d9b 63c1d1b5d8 12381aeee0
64aa0435e8 ab437fb459 5ee1e0a9eb 354677bc7a 6d44eea11c f69f008dab f142b9adbb 73733f674c
bd05aaa4f9 ab14efa329 055a90df30 5161ccabe3 28e56c21fe f1770b4e5c d9b49a7234 59b0cc5be6
ebe0b7ecdb 963861d2e6 587b8af870 88863b137f 134c795f4b 444ea22b8d 9663ee6062 08bc86fa3d
9227d6c02c 75ef45b95a 86a8eb74e1 7f27141ee4 be8a61b673 be234de6ad da1f266d1b 9a22c2de8b
7d3c6a4a5e 753f32b4cb bdf3236350 d8a505b87c caf021a7a6 3776856a6c c9f87b907c ae378b6d50
f7afd7d1d6 c92a99baaf 3c82d89aa4 69aedb8490 af00c5382b 8e93493d2b ac439060a3 d6f28be8f2
d3946840de 355f589e5a 4740a7b930 cc71249e21 ccd9e27823 9f16e4dc81 eca7f7be61 409675e0c0
f9c839bfdc 47e212ee17 042976fac3 5b45bbb1a5 9bb702076a 8ed97810a8 c5af9605ee f5a18dce56
d14d8f5e02 eadc66f53b 5f946d08cb 3f7c37abb9 b60b48b948 68ecf939d9 a50d093679 740e829e4f
f7051351ec a1018ad683 a912189e51 7298f64e5c fcf902eb1f 89c71a068b 8946145070 db15c0de9e
643a918034 f21dcd8122 ac44d4a27b 9c898c34f6 c0e0ddbc1c 6c756ea52f 0a413b6fd2 7ac7d9c9a8
7322d0bd30 469cc749d8 e91a694b46 4587a9f651 8c51094df1 c795d76fe9 c6e8a0b6d3 b23be4164f
de77f3ff13 7c0ff1ff6a 888cb92987 9a038f7bed b98f245bf2 e59b5caaf9 5a602d7adb 14aa7a3f67
6e991107e7 622bce9c52 48587bd034 19d6352950 2c4b5c99ce 15a194c9b0 e94e3cead9 ee2ed92fb5
db4579435a ae1ab1d957 a8edd03e65 8768b4cc31 cd9c192208 dcd97e7d26 8a6ae68b9a dff3e72e7d
f0ac440146 7d7e5f4e1d a21dd4a2ed 7f4e5bf435 dad590f070 f22b81fe3b 68c1acbc7a e5412404ca
5e733f6217 c830e4e399 c3ecd2b3e5 fd4d2db467 49b76ab050 c53f931d09 f344dbbc07 c617c10ffa
4a15625bf9 c5def6d736 b232b675a7 6c03683c20 2da57db5a8 c7b794c1c4 5154cec7d2 e4cbb3c90e
17f5cbeac2 90a4924508 d499053016 d343a67d6a 8435ab48b0 27edf0f55a 3d00554332 2631709abf
4b0102b309 b9a24e0338 f127d4a8b1 73780682a1 9a1c034a51 94179f27ec 6797b5a93d 874a131ec9
641727ee0e f50075257c 4d1de8f75c b76d0153eb f82789b99f 89c789ce10 6dba54b028 d852cb4ed6
4c666fa1fe 98adc1872d 1df84ef6e4 80b88a9365 558b7a54c7 9522d0c733 396d6e5c0e a69d7471b3
eb56e1417c 3d032a8efe d712470047 423f96b95f d1bd097079 ceabe8ecba 0fff0568fa 10e822238e
1cf1c827f1 5bada440fa 04bb95e044 819140bc59 d490bcc955 cb94960178 7361c10cb9 b47408e94e
806a3590aa e953fe021d e570d94a6e 78505cb0a8 f8d77d9a30 1a4887f028 71042b5919 435976800a
18f4c7205b 06eeefb8bf 1737d7cf42 cd03fa6d46 a10a73962e 99d6fee7a0 c8831f0f50 fdeb523581
9a868464ee 051ec75e01 fc3909491a 2437fe270c c937b193d0 8b5c995486 4410f2a582 bbb816868e
2441cca810 3c3dfb380b 0f165f0bf0 7fcff548eb 8fa7b9ba00 b101e15985 b4e412a37f ac0e2bbdb2
ba16330e20 c9cb9774c6 7b5b14dbd0 bd13973cf5 a7f8656e89 1be52fab06 c9baff1a7f d1bc68086d
44a4c0670b 4785056740 694aa448a4 ee215b1ced 018e87884c a81cbbc325 3962c9d816 e187875da5
f0d1a799a2 5452d535d7 7a776532a8 e704d57957 c9a6eb5a1a c071812160 3f95ad9ada 250f59c9f5
c17bbea2c7 0262f8757a dbc2c481dc e432c39eec 7383ae4f9c d217e33678 d1daceff91 dbbd556830
d483f1d90f 80684a998f 0c4f0fde48 071115cd52 9136a755fe 6ff864fc04 828a6f4696 417aa550a6
78ffc2e238 c9f22db1b5 41da560b64 b49e0b95f7 50ef2729e6 6a901bb7de f0da63c850 b861c1dd3c
45faa2e9e8 b2e1eed684 4018221da6 28ec3886f9 ed323f4602 f72d360384 682bba452b e2ce5ae2af
039a0da69e c9ad12b87e 094be2e2e6 1b3029d833 d00d5e863b 3d19e89710 247cd6fc44 ba244c887f
f77d92492a 1b85af95c0 9236f5d058 39ba8cd230 e67328945f bcee2b0b6d be9a1b2f9a 4f9c2aadc2
25d419ac7f 57cfb508f1 c88445f90d 9b6d6c3a42 d26c1405ce 4bb35ab92d cdd983aa04 e83ce86eb3
bcc590a3ee 5fdffb93d1 db20b2c04f 4e037c0f43 fdcc2ac5cb 9099bd79f8 a01683d8f6 6d2b2a9a93
de4166bf0d 1cbef30788 89c6e27489 f74ffc530d 441d4d6a38 3c6b9d63a6 254d8616b7 d3bc6fda74
e4a5d9376f 523605e3e7 ed33fac337 bf0e62aca5 60c0b79b10 f9d2e7aa93 0646748e24 f6408e9df7
5769bc815c 5a3e3e9b1f 26cbafa204 d14541d1de 3955ebd56c e212645cf0 db9c1c24d3 0a305c281f
43c96a7875 3a93aba7d7 3d563356e5 9205ef30f8 19c2dccc6d 8f819048ed 3a3bb44f11 f8e713a544
573f1eba56 a36be258d8 690ec057c3 2681feb1f6 e662adb8c5 c94bd96c93 6d85433194 7a6092a779
4c84529aed 512d3e018f c6aff985c9 7fadf31a2b e7d098ed1e 21fba27355 74e37307f7 d9d7c009a5
2220cf9733 3325b72b86 9182d56246 299ece19a8 0a0732d7c0 28011d97a9 e71b0d1b6a ec01b62a82
12b45c6896 51c60dd4ee
.github/ISSUE_TEMPLATE/feature-request.yml (vendored, 2 changes)

@@ -1,6 +1,6 @@
 name: 💡 Feature Request
 description: Suggest an idea for this project
-labels: ["enhancement", "status/needs-triage"]
+labels: ["feature-request", "status/needs-triage"]


 body:
.github/dependabot.yml (vendored, 5 changes)

@@ -13,3 +13,8 @@ updates:
     labels:
       - "dependencies"
       - "pip"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    target-branch: master
.github/labeler.yml (vendored, new file, 27 changes)

@@ -0,0 +1,27 @@
+documentation:
+  - changed-files:
+      - any-glob-to-any-file: "docs/**"
+
+provider/aws:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/aws/**"
+      - any-glob-to-any-file: "tests/providers/aws/**"
+
+provider/azure:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/azure/**"
+      - any-glob-to-any-file: "tests/providers/azure/**"
+
+provider/gcp:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/gcp/**"
+      - any-glob-to-any-file: "tests/providers/gcp/**"
+
+provider/kubernetes:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/kubernetes/**"
+      - any-glob-to-any-file: "tests/providers/kubernetes/**"
+
+github_actions:
+  - changed-files:
+      - any-glob-to-any-file: ".github/workflows/*"
.github/workflows/build-documentation-on-pr.yml (vendored, new file, 24 changes)

@@ -0,0 +1,24 @@
+name: Pull Request Documentation Link
+
+on:
+  pull_request:
+    branches:
+      - 'master'
+      - 'prowler-4.0-dev'
+    paths:
+      - 'docs/**'
+
+env:
+  PR_NUMBER: ${{ github.event.pull_request.number }}
+
+jobs:
+  documentation-link:
+    name: Documentation Link
+    runs-on: ubuntu-latest
+    steps:
+      - name: Leave PR comment with the SaaS Documentation URI
+        uses: peter-evans/create-or-update-comment@v4
+        with:
+          issue-number: ${{ env.PR_NUMBER }}
+          body: |
+            You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
.github/workflows/build-lint-push-containers.yml (vendored, 14 changes)

@@ -32,11 +32,11 @@ jobs:
       POETRY_VIRTUALENVS_CREATE: "false"
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Setup python (release)
         if: github.event_name == 'release'
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

@@ -52,13 +52,13 @@ jobs:
          poetry version ${{ github.event.release.tag_name }}

      - name: Login to DockerHub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to Public ECR
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: public.ecr.aws
          username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}

@@ -67,11 +67,11 @@ jobs:
          AWS_REGION: ${{ env.AWS_REGION }}

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        if: github.event_name == 'push'
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v5
        with:
          push: true
          tags: |

@@ -83,7 +83,7 @@ jobs:

      - name: Build and push container image (release)
        if: github.event_name == 'release'
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v5
        with:
          # Use local context to get changes
          # https://github.com/docker/build-push-action#path-context
.github/workflows/codeql.yml (vendored, 10 changes)

@@ -13,10 +13,10 @@ name: "CodeQL"

 on:
   push:
-    branches: [ "master", prowler-2, prowler-3.0-dev ]
+    branches: [ "master", "prowler-4.0-dev" ]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ "master" ]
+    branches: [ "master", "prowler-4.0-dev" ]
   schedule:
     - cron: '00 12 * * *'

@@ -37,11 +37,11 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.

@@ -52,6 +52,6 @@ jobs:
          # queries: security-extended,security-and-quality

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/find-secrets.yml (vendored, 4 changes)

@@ -7,11 +7,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@v3.4.4
+        uses: trufflesecurity/trufflehog@v3.69.0
        with:
          path: ./
          base: ${{ github.event.repository.default_branch }}
.github/workflows/labeler.yml (vendored, new file, 16 changes)

@@ -0,0 +1,16 @@
+name: "Pull Request Labeler"
+
+on:
+  pull_request_target:
+    branches:
+      - "master"
+      - "prowler-4.0-dev"
+
+jobs:
+  labeler:
+    permissions:
+      contents: read
+      pull-requests: write
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/labeler@v5
.github/workflows/pull-request.yml (vendored, 13 changes)

@@ -4,21 +4,23 @@ on:
   push:
     branches:
       - "master"
+      - "prowler-4.0-dev"
   pull_request:
     branches:
       - "master"
+      - "prowler-4.0-dev"
 jobs:
   build:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.9", "3.10", "3.11"]
+        python-version: ["3.9", "3.10", "3.11", "3.12"]

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@v39
+        uses: tj-actions/changed-files@v42
        with:
          files: ./**
          files_ignore: |

@@ -26,6 +28,7 @@ jobs:
            README.md
            docs/**
            permissions/**
+            mkdocs.yml
      - name: Install poetry
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |

@@ -33,7 +36,7 @@ jobs:
          pipx install poetry
      - name: Set up Python ${{ matrix.python-version }}
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"

@@ -85,6 +88,6 @@ jobs:
          poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@v3
+        uses: codecov/codecov-action@v4
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/pypi-release.yml (vendored, 6 changes)

@@ -16,7 +16,7 @@ jobs:
     name: Release Prowler to PyPI
     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ env.GITHUB_BRANCH }}
      - name: Install dependencies

@@ -24,7 +24,7 @@ jobs:
          pipx install poetry
          pipx inject poetry poetry-bumpversion
      - name: setup python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
        with:
          python-version: 3.9
          cache: 'poetry'

@@ -44,7 +44,7 @@ jobs:
          poetry publish
      # Create pull request with new version
      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
          commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
@@ -23,12 +23,12 @@ jobs:
     # Steps represent a sequence of tasks that will be executed as part of the job
     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          ref: ${{ env.GITHUB_BRANCH }}

      - name: setup python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
        with:
          python-version: 3.9 #install the python needed

@@ -38,7 +38,7 @@ jobs:
          pip install boto3

      - name: Configure AWS Credentials -- DEV
-        uses: aws-actions/configure-aws-credentials@v1
+        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ env.AWS_REGION_DEV }}
          role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}

@@ -50,12 +50,12 @@ jobs:

      # Create pull request
      - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v4
+        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
          commit-message: "feat(regions_update): Update regions for AWS services."
          branch: "aws-services-regions-updated-${{ github.sha }}"
-          labels: "status/waiting-for-revision, severity/low"
+          labels: "status/waiting-for-revision, severity/low, provider/aws"
          title: "chore(regions_update): Changes in regions for AWS services."
          body: |
            ### Description
@@ -1,7 +1,7 @@
 repos:
   ## GENERAL
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: check-merge-conflict
       - id: check-yaml

@@ -15,7 +15,7 @@ repos:

   ## TOML
   - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-    rev: v2.10.0
+    rev: v2.12.0
     hooks:
       - id: pretty-format-toml
         args: [--autofix]

@@ -28,7 +28,7 @@ repos:
       - id: shellcheck
   ## PYTHON
   - repo: https://github.com/myint/autoflake
-    rev: v2.2.0
+    rev: v2.2.1
     hooks:
       - id: autoflake
         args:

@@ -39,25 +39,25 @@ repos:
         ]

   - repo: https://github.com/timothycrosley/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort
         args: ["--profile", "black"]

   - repo: https://github.com/psf/black
-    rev: 22.12.0
+    rev: 24.1.1
     hooks:
       - id: black

   - repo: https://github.com/pycqa/flake8
-    rev: 6.1.0
+    rev: 7.0.0
     hooks:
       - id: flake8
         exclude: contrib
         args: ["--ignore=E266,W503,E203,E501,W605"]

   - repo: https://github.com/python-poetry/poetry
-    rev: 1.6.0 # add version here
+    rev: 1.7.0
     hooks:
       - id: poetry-check
       - id: poetry-lock

@@ -80,18 +80,12 @@ repos:
   - id: trufflehog
     name: TruffleHog
     description: Detect secrets in your data.
-    # entry: bash -c 'trufflehog git file://. --only-verified --fail'
+    entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
     # For running trufflehog in docker, use the following entry instead:
-    entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
+    # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
     language: system
     stages: ["commit", "push"]

   - id: pytest-check
     name: pytest-check
     entry: bash -c 'pytest tests -n auto'
     language: system
     files: '.*\.py'

   - id: bandit
     name: bandit
     description: "Bandit is a tool for finding common security issues in Python code"
@@ -8,16 +8,18 @@ version: 2
 build:
   os: "ubuntu-22.04"
   tools:
-    python: "3.9"
+    python: "3.11"
   jobs:
     post_create_environment:
       # Install poetry
       # https://python-poetry.org/docs/#installing-manually
-      - pip install poetry
-      # Tell poetry to not use a virtual environment
-      - poetry config virtualenvs.create false
+      - python -m pip install poetry
     post_install:
-      - poetry install -E docs
+      # Install dependencies with 'docs' dependency group
+      # https://python-poetry.org/docs/managing-dependencies/#dependency-groups
+      # VIRTUAL_ENV needs to be set manually for now.
+      # See https://github.com/readthedocs/readthedocs.org/pull/11152/
+      - VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} python -m poetry install --only=docs

 mkdocs:
   configuration: mkdocs.yml
@@ -55,7 +55,7 @@ further defined and clarified by project maintainers.
 ## Enforcement

 Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at community@prowler.cloud. All
+reported by contacting the project team at [support.prowler.com](https://customer.support.prowler.com/servicedesk/customer/portals). All
 complaints will be reviewed and investigated and will result in a response that
 is deemed necessary and appropriate to the circumstances. The project team is
 obligated to maintain confidentiality with regard to the reporter of an incident.
LICENSE (2 changes)

@@ -186,7 +186,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.

-   Copyright 2018 Netflix, Inc.
+   Copyright @ 2024 Toni de la Fuente

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
README.md (44 changes)

@@ -1,24 +1,31 @@
 <p align="center">
-  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/prowler-pro-dark.png?raw=True#gh-dark-mode-only" width="150" height="36">
-  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/prowler-pro-light.png?raw=True#gh-light-mode-only" width="15%" height="15%">
+  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png?raw=True#gh-light-mode-only" width="350" height="115">
+  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png?raw=True#gh-dark-mode-only" width="350" height="115">
 </p>
 <p align="center">
-  <b><i>See all the things you and your team can do with ProwlerPro at <a href="https://prowler.pro">prowler.pro</a></i></b>
+  <b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
 </p>
+<p align="center">
+  <b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
+</p>
+
+<p align="center">
+  <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/3985464/3617e470-670c-47c9-9794-ce895ebdb627"></a>
+  <br>
+  <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
+</p>

 <hr>
 <p align="center">
   <img src="https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png" />
 </p>
 <p align="center">
   <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
   <a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
   <a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
   <a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
   <a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg?label=prowler-cloud%20downloads"></a>
   <a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
   <a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
   <a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
   <a href="https://gallery.ecr.aws/prowler-cloud/prowler"><img width="120" height="19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
   <a href="https://codecov.io/gh/prowler-cloud/prowler"><img src="https://codecov.io/gh/prowler-cloud/prowler/graph/badge.svg?token=OflBGsdpDl"/></a>
 </p>
 <p align="center">
   <a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>

@@ -30,6 +37,7 @@
   <a href="https://twitter.com/ToniBlyx"><img alt="Twitter" src="https://img.shields.io/twitter/follow/toniblyx?style=social"></a>
   <a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
 </p>
+<hr>

 # Description

@@ -37,16 +45,16 @@

 It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.

-| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.cloud/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.cloud/en/latest/tutorials/misc/#categories) |
+| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
 |---|---|---|---|---|
-| AWS | 301 | 61 -> `prowler aws --list-services` | 25 -> `prowler aws --list-compliance` | 5 -> `prowler aws --list-categories` |
+| AWS | 302 | 61 -> `prowler aws --list-services` | 27 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
 | GCP | 73 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
-| Azure | 23 | 4 -> `prowler azure --list-services` | CIS soon | 1 -> `prowler azure --list-categories` |
-| Kubernetes | Planned | - | - | - |
+| Azure | 91 | 14 -> `prowler azure --list-services` | CIS soon | 2 -> `prowler azure --list-categories` |
+| Kubernetes | Work In Progress | - | CIS soon | - |

 # 📖 Documentation

-The full documentation can now be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)
+The full documentation can now be found at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)

 ## Looking for Prowler v2 documentation?
 For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.

@@ -54,13 +62,13 @@ For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.
 # ⚙️ Install

 ## Pip package
-Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9:
+Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:

 ```console
 pip install prowler
 prowler -v
 ```
-More details at https://docs.prowler.cloud
+More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)

 ## Containers

@@ -77,7 +85,7 @@ The container images are available here:

 ## From Github

-Python >= 3.9 is required with pip and poetry:
+Python >= 3.9, < 3.13 is required with pip and poetry:

 ```
 git clone https://github.com/prowler-cloud/prowler

@@ -178,11 +186,7 @@ Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
 2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
 3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)

-Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
-
-- Viewer
-- Security Reviewer
-- Stackdriver Account Viewer
+Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.

 > By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
@@ -14,7 +14,7 @@ As an **AWS Partner**

 If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to help@prowler.pro.

-The information you share with Verica as part of this process is kept confidential within Verica and the Prowler team. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
+The information you share with ProwlerPro as part of this process is kept confidential within ProwlerPro. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.

 We will review the submitted report, and assign it a tracking number. We will then respond to you, acknowledging receipt of the report, and outline the next steps in the process.
@@ -101,7 +101,7 @@ All the checks MUST fill the `report.status` and `report.status_extended`

 - Status -- `report.status`
     - `PASS` --> If the check is passing against the configured value.
-    - `FAIL` --> If the check is passing against the configured value.
+    - `FAIL` --> If the check is failing against the configured value.
     - `INFO` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`.
 - Status Extended -- `report.status_extended`
     - MUST end in a dot `.`

@@ -125,7 +125,7 @@ All the checks MUST fill the `report.resource_id` and `report.resource_arn`
 - Resource ARN -- `report.resource_arn`
     - AWS Account --> Root ARN `arn:aws:iam::123456789012:root`
     - AWS Resource --> Resource ARN
-    - Root resource --> Root ARN `arn:aws:iam::123456789012:root`
+    - Root resource --> Resource Type ARN `f"arn:{service_client.audited_partition}:<service_name>:{service_client.region}:{service_client.audited_account}:<resource_type>"`
 - GCP
     - Resource ID -- `report.resource_id`
         - GCP Resource --> Resource ID

@@ -196,14 +196,17 @@ aws:
 As you can see in the above code, within the service client, in this case the `ec2_client`, there is an object called `audit_config` which is a Python dictionary containing the values read from the configuration file.

 In order to use it, you have to check first if the value is present in the configuration file. If the value is not present, you can create it in the `config.yaml` file and then read it from the check.
-> It is mandatory to always use the `dictionary.get(value, default)` syntax to set a default value in the case the configuration value is not present.
+
+???+ note
+    It is mandatory to always use the `dictionary.get(value, default)` syntax to set a default value in the case the configuration value is not present.

 ## Check Metadata

 Each Prowler check has metadata associated which is stored at the same level of the check's folder in a file called `check_name.metadata.json` containing the check's metadata.

-> We are going to include comments in this example metadata JSON but they cannot be included because the JSON format does not allow comments.
+???+ note
+    We are going to include comments in this example metadata JSON but they cannot be included because the JSON format does not allow comments.

 ```json
 {
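The `audit_config` pattern in the hunk above is easy to get wrong, so here is a minimal, self-contained sketch of it (hypothetical names and values, not Prowler's actual check code): the service client carries an `audit_config` dict parsed from `config.yaml`, and a check must read tunables from it with `dict.get()` so a default applies when the key is absent.

```python
# Hypothetical sketch of the audit_config pattern described above.
# In a real check this dict would come from the service client
# (e.g. ec2_client.audit_config), populated from config.yaml.
audit_config = {"max_security_group_rules": 50}

def check_security_group(rules_count: int) -> str:
    # dict.get() with a default keeps the check working even when
    # config.yaml does not define the key.
    max_rules = audit_config.get("max_security_group_rules", 100)
    return "PASS" if rules_count <= max_rules else "FAIL"

print(check_security_group(30))   # PASS
print(check_security_group(120))  # FAIL
```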
docs/developer-guide/debugging.md (new file, 45 changes)

@@ -0,0 +1,45 @@
+# Debugging
+
+Debugging in Prowler makes things easier!
+If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution. To do that, if you are using VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
+The following file is an example of the [debugging configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) file that you can add to [Visual Studio Code](https://code.visualstudio.com/).
+
+This file should be inside the *.vscode* folder and its name has to be *launch.json*:
+
+```json
+{
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "Python: Current File",
+            "type": "python",
+            "request": "launch",
+            "program": "prowler.py",
+            "args": [
+                "aws",
+                "-f",
+                "eu-west-1",
+                "--service",
+                "cloudwatch",
+                "--log-level",
+                "ERROR",
+                "-p",
+                "dev",
+            ],
+            "console": "integratedTerminal",
+            "justMyCode": false
+        },
+        {
+            "name": "Python: Debug Tests",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "purpose": [
+                "debug-test"
+            ],
+            "console": "integratedTerminal",
+            "justMyCode": false
+        }
+    ]
+}
+```
@@ -1,6 +1,6 @@
 # Developer Guide

-You can extend Prowler in many different ways, in most cases you will want to create your own checks and compliance security frameworks, here is where you can learn about how to get started with it. We also include how to create custom outputs, integrations and more.
+You can extend Prowler Open Source in many different ways, in most cases you will want to create your own checks and compliance security frameworks, here is where you can learn about how to get started with it. We also include how to create custom outputs, integrations and more.

 ## Get the code and install all dependencies

@@ -16,7 +16,7 @@ pip install poetry
 ```
 Then install all dependencies including the ones for developers:
 ```
-poetry install
+poetry install --with dev
 poetry shell
 ```

@@ -31,7 +31,9 @@ You should get an output like the following:
 pre-commit installed at .git/hooks/pre-commit
 ```

-Before we merge any of your pull requests we pass checks to the code, we use the following tools and automation to make sure the code is secure and dependencies up-to-dated (these should have been already installed if you ran `pipenv install -d`):
+Before we merge any of your pull requests we pass checks to the code, we use the following tools and automation to make sure the code is secure and dependencies up to date:
+???+ note
+    These should have been already installed if you ran `poetry install --with dev`

 - [`bandit`](https://pypi.org/project/bandit/) for code security review.
 - [`safety`](https://pypi.org/project/safety/) and [`dependabot`](https://github.com/features/security) for dependencies.
@@ -23,7 +23,7 @@ Each file version of a framework will have the following structure at high level
   "Requirements": [
     {
       "Id": "<unique-id>",
-      "Description": "Requiemente full description",
+      "Description": "Requirement full description",
       "Checks": [
         "Here is the prowler check or checks that is going to be executed"
       ],

@@ -38,4 +38,4 @@ Each file version of a framework will have the following structure at high level
 }
 ```

-Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`.
+Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`. Also, you need to add a new conditional in `prowler/lib/outputs/file_descriptors.py` if you create a new CSV model.
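To make the structure above concrete, here is a small, hypothetical sketch (not Prowler's actual loader) that parses a framework file of this shape and prints which checks each requirement maps to; the field names follow the example in the diff, while the ids and check names are illustrative.

```python
# Hypothetical sketch: iterating a compliance framework file with the
# structure shown in this diff. Only "Id", "Description" and "Checks"
# from the example above are used here.
import json

framework_json = """
{
  "Requirements": [
    {
      "Id": "req-1",
      "Description": "Requirement full description",
      "Checks": ["example_check_one", "example_check_two"]
    }
  ]
}
"""

framework = json.loads(framework_json)
for requirement in framework["Requirements"]:
    checks = ", ".join(requirement["Checks"])
    print(f"{requirement['Id']}: {checks}")
```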
@@ -40,13 +40,15 @@ Other commands to run tests:
 - Run tests for a provider service: `pytest -n auto -vvv -s -x tests/providers/<provider>/services/<service>`
 - Run tests for a provider check: `pytest -n auto -vvv -s -x tests/providers/<provider>/services/<service>/<check>`

-> Refer to the [pytest documentation](https://docs.pytest.org/en/7.1.x/getting-started.html) documentation for more information.
+???+ note
+    Refer to the [pytest documentation](https://docs.pytest.org/en/7.1.x/getting-started.html) for more information.

 ## AWS

 For the AWS provider we have ways to test a Prowler check based on the following criteria:

-> Note: We use and contribute to the [Moto](https://github.com/getmoto/moto) library which allows us to easily mock out tests based on AWS infrastructure. **It's awesome!**
+???+ note
+    We use and contribute to the [Moto](https://github.com/getmoto/moto) library which allows us to easily mock out tests based on AWS infrastructure. **It's awesome!**

 - AWS API calls covered by [Moto](https://github.com/getmoto/moto):
     - Service tests with `@mock_<service>`

@@ -195,7 +197,8 @@ class Test_iam_password_policy_uppercase:

 If the IAM service for the check we want to test is not covered by Moto, we have to inject the objects in the service client using [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock). As we have pointed above, we cannot instantiate the service since it will make real calls to the AWS APIs.

-> The following example uses the IAM GetAccountPasswordPolicy which is covered by Moto but this is only for demonstration purposes.
+???+ note
+    The following example uses the IAM GetAccountPasswordPolicy which is covered by Moto but this is only for demonstration purposes.

 The following code shows how to use MagicMock to create the service objects.

@@ -325,7 +328,8 @@ class Test_iam_password_policy_uppercase:

 Note that this does not use Moto, to keep it simple, but if you use any `moto`-decorators in addition to the patch, the call to `orig(self, operation_name, kwarg)` will be intercepted by Moto.

-> The above code comes from here https://docs.getmoto.org/en/latest/docs/services/patching_other_services.html
+???+ note
+    The above code comes from https://docs.getmoto.org/en/latest/docs/services/patching_other_services.html

 #### Mocking more than one service

@@ -385,7 +389,7 @@ with mock.patch(
     "prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
     new=audit_info,
 ), mock.patch(
-    "prowler.providers.aws.services.<service>.<check>.<check>.<service>_client",
+    "prowler.providers.<provider>.services.<service>.<check>.<check>.<service>_client",
     new=<SERVICE>(audit_info),
 ):
 ```

@@ -407,10 +411,10 @@ with mock.patch(
     "prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
     new=audit_info,
 ), mock.patch(
-    "prowler.providers.aws.services.<service>.<SERVICE>",
+    "prowler.providers.<provider>.services.<service>.<SERVICE>",
     new=<SERVICE>(audit_info),
 ) as service_client, mock.patch(
-    "prowler.providers.aws.services.<service>.<service>_client.<service>_client",
+    "prowler.providers.<provider>.services.<service>.<service>_client.<service>_client",
     new=service_client,
 ):
 ```

@@ -523,7 +527,7 @@ from unittest import mock
 from uuid import uuid4

 # Azure Constants
-AZURE_SUSCRIPTION = str(uuid4())
+from tests.providers.azure.azure_fixtures import AZURE_SUBSCRIPTION

@@ -542,7 +546,7 @@ class Test_defender_ensure_defender_for_arm_is_on:

     # Create the custom Defender object to be tested
     defender_client.pricings = {
-        AZURE_SUSCRIPTION: {
+        AZURE_SUBSCRIPTION: {
             "Arm": Defender_Pricing(
                 resource_id=resource_id,
                 pricing_tier="Not Standard",

@@ -580,9 +584,9 @@ class Test_defender_ensure_defender_for_arm_is_on:
         assert result[0].status == "FAIL"
         assert (
             result[0].status_extended
-            == f"Defender plan Defender for ARM from subscription {AZURE_SUSCRIPTION} is set to OFF (pricing tier not standard)"
+            == f"Defender plan Defender for ARM from subscription {AZURE_SUBSCRIPTION} is set to OFF (pricing tier not standard)"
         )
-        assert result[0].subscription == AZURE_SUSCRIPTION
+        assert result[0].subscription == AZURE_SUBSCRIPTION
         assert result[0].resource_name == "Defender plan ARM"
         assert result[0].resource_id == resource_id
 ```
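As a complement to the placeholder patterns above, this is a minimal, self-contained sketch (hypothetical stand-in objects; not Prowler's actual test code) of the MagicMock injection the section describes: build a bare mock, attach only the attributes the check reads, and patch it in as the service client so no real AWS call is made.

```python
# Hypothetical sketch of the MagicMock service-client injection described
# above. In a real test the patch target would follow the placeholder
# pattern "prowler.providers.<provider>.services.<service>.<check>.<check>.<service>_client";
# here a SimpleNamespace stands in for the imported check module so the
# snippet runs on its own.
from types import SimpleNamespace
from unittest import mock

check_module = SimpleNamespace(iam_client=None)  # stand-in for the check module

iam_client = mock.MagicMock()
iam_client.password_policy = mock.MagicMock(uppercase=True)

with mock.patch.object(check_module, "iam_client", new=iam_client):
    # Code reading iam_client from the module now sees the injected mock,
    # so a check's execute() would run against password_policy without
    # touching the AWS APIs.
    assert check_module.iam_client.password_policy.uppercase is True
```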
@@ -5,7 +5,7 @@ Prowler has been written in Python

 Since Prowler uses AWS Credentials under the hood, you can follow any authentication method as described [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence).

-### AWS Authentication
+### Authentication

 Make sure you have properly configured your AWS-CLI with a valid Access Key and Region or declare AWS variables properly (or instance profile/role):

@@ -26,9 +26,8 @@ Those credentials must be associated to a user or role with proper permissions
 - `arn:aws:iam::aws:policy/SecurityAudit`
 - `arn:aws:iam::aws:policy/job-function/ViewOnlyAccess`

-> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
-
-> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
+???+ note
+    Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using. If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).

 ### Multi-Factor Authentication

@@ -39,7 +38,7 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to

 ## Azure

-Prowler for azure supports the following authentication types:
+Prowler for Azure supports the following authentication types:

 - Service principal authentication by environment variables (Enterprise Application)
 - Current az cli credentials stored

@@ -63,7 +62,7 @@ The other three cases does not need additional configuration

 ### Permissions

-To use each one you need to pass the proper flag to the execution. Prowler fro Azure handles two types of permission scopes, which are:
+To use each one you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:

 - **Azure Active Directory permissions**: Used to retrieve metadata from the identity assumed by Prowler and future AAD checks (not mandatory to have access to execute the tool)
 - **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.

@@ -71,25 +70,51 @@ To use each one you need to pass the proper flag to the execution.

 #### Azure Active Directory scope

-Azure Active Directory (AAD) permissions required by the tool are the following:
+Microsoft Entra ID (AAD earlier) permissions required by the tool are the following:

 - `Directory.Read.All`
 - `Policy.Read.All`

-The best way to assign it is through the azure web console:
+The best way to assign it is through the Azure web console:

+1. Access to Microsoft Entra ID
+2. In the left menu bar, go to "App registrations"
+3. Once there, in the menu bar click on "+ New registration" to register a new application
+4. Fill the "Name", select the "Supported account types" and click on "Register". You will be redirected to the applications page.
+    ![Register an application](../img/register-application.png)
+4. Select the new application
+5. In the left menu bar, select "API permissions"
+6. Then click on "+ Add a permission" and select "Microsoft Graph"
+7. Once in the "Microsoft Graph" view, select "Application permissions"
+8. Finally, search for "Directory" and "Policy" and select the following permissions:
+    - `Directory.Read.All`
+    - `Policy.Read.All`
+    ![Directory permissions](../img/directory-permission.png)
+
+    ![Policy permissions](../img/policy-permission.png)

 #### Subscriptions scope

 Regarding the subscription scope, Prowler by default scans all the subscriptions that is able to list, so it is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:

 - `Security Reader`
 - `Reader`

+To assign these roles, follow the instructions:
+
+1. Access your subscription, then select your subscription.
+2. Select "Access control (IAM)".
+3. In the overview, select "Roles"
+    ![IAM page](../img/page-IAM.png)
+4. Click on "+ Add" and select "Add role assignment"
+5. In the search bar, type `Security Reader`, select it and click on "Next"
+6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
+7. Click on "Review + assign" to apply the new role.
+
+*Repeat these steps for `Reader` role*

 ## Google Cloud

-### GCP Authentication
+### Authentication

 Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):

@@ -97,10 +122,7 @@ Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
 2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
 3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)

-Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
+Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.

-- Viewer
-- Security Reviewer
-- Stackdriver Account Viewer

-> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
+???+ note
+    By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
BIN docs/img/page-IAM.png (new file, 348 KiB)
BIN docs/img/prowler-logo-black.png (new file, 9.2 KiB)
BIN docs/img/prowler-logo-white.png (new file, 11 KiB)
BIN docs/img/register-application.png (new file, 302 KiB)
@@ -1,38 +1,13 @@
|
||||
<p href="https://github.com/prowler-cloud/prowler">
|
||||
<img align="right" src="./img/prowler-logo.png" height="100">
|
||||
</p>
|
||||
<br>
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
|
||||
|
||||
# Prowler Documentation
|
||||
|
||||
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
|
||||
|
||||
For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cloud/prowler/tree/2.12.0) to the branch and its README.md.
|
||||
|
||||
- You are currently in the **Getting Started** section where you can find general information and requirements to help you start with the tool.
|
||||
- In the [Tutorials](./tutorials/misc.md) section you will see how to take advantage of all the features in Prowler.
|
||||
- In the [Contact Us](./contact.md) section you can find how to reach us out in case of technical issues.
|
||||
- In the [About](./about.md) section you will find more information about the Prowler team and license.
|
||||
|
||||
## About Prowler
|
||||
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
|
||||
|
||||
It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
|
||||
|
||||
[](https://twitter.com/prowlercloud)
|
||||
|
||||
## About ProwlerPro
|
||||
|
||||
<a href="https://prowler.pro"><img align="right" src="./img/prowler-pro-light.png" width="350"></a> **ProwlerPro** gives you the benefits of Prowler Open Source plus continuous monitoring, faster execution, personalized support, visualization of your data with dashboards, alerts and much more.
|
||||
Visit <a href="https://prowler.pro">prowler.pro</a> for more info.
|
||||

|
||||
|
||||
Prowler offers hundreds of controls covering more than 25 standards and compliance frameworks like CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
|
||||
|
||||
## Quick Start
|
||||
### Installation
|
||||
|
||||
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with `Python >= 3.9`:
|
||||
|
||||
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed using pip with `Python >= 3.9`:
|
||||
|
||||
=== "Generic"
|
||||
|
||||
@@ -136,30 +111,21 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
|
||||
|
||||
=== "AWS CloudShell"
|
||||
|
||||
Prowler can be easely executed in AWS CloudShell but it has some prerequsites to be able to to so. AWS CloudShell is a container running with `Amazon Linux release 2 (Karoo)` that comes with Python 3.7, since Prowler requires Python >= 3.9 we need to first install a newer version of Python. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [2](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* First install all dependences and then Python, in this case we need to compile it because there is not a package available at the time this document is written:
|
||||
```
|
||||
sudo yum -y install gcc openssl-devel bzip2-devel libffi-devel
|
||||
wget https://www.python.org/ftp/python/3.9.16/Python-3.9.16.tgz
|
||||
tar zxf Python-3.9.16.tgz
|
||||
cd Python-3.9.16/
|
||||
./configure --enable-optimizations
|
||||
sudo make altinstall
|
||||
python3.9 --version
|
||||
cd
|
||||
```
|
||||
* Open AWS CloudShell `bash`.
|
||||
|
||||
_Commands_:
|
||||
|
||||
* Once Python 3.9 is available we can install Prowler from pip:
|
||||
```
|
||||
pip3.9 install prowler
|
||||
pip install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
> To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
|
||||
???+ note
|
||||
To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
|
||||
|
||||
=== "Azure CloudShell"
|
||||
|
||||
@@ -194,14 +160,18 @@ You can run Prowler from your workstation, an EC2 instance, Fargate or any other
|
||||

|
||||
## Basic Usage
|
||||
|
||||
To run Prowler, you will need to specify the provider (e.g. aws, gcp or azure):
|
||||
> If no provider is specified, AWS will be used for backward compatibility with most of the v2 options.
|
||||
To run Prowler, you will need to specify the provider (e.g `aws`, `gcp` or `azure`):
|
||||
|
||||
???+ note
|
||||
If no provider is specified, AWS will be used for backward compatibility with most of the v2 options.
|
||||
|
||||
```console
|
||||
prowler <provider>
|
||||
```
|
||||

|
||||
> Running the `prowler` command without options will use your environment variable credentials, see [Requirements](./getting-started/requirements.md) section to review the credentials settings.
|
||||
|
||||
???+ note
|
||||
Running the `prowler` command without options will use your environment variable credentials, see [Requirements](./getting-started/requirements.md) section to review the credentials settings.
|
||||
|
||||
If you miss the former output you can use `--verbose` but Prowler v3 is smoking fast, so you won't see much ;)
|
||||
|
||||
@@ -252,7 +222,9 @@ Use a custom AWS profile with `-p`/`--profile` and/or AWS regions which you want
|
||||
```console
|
||||
prowler aws --profile custom-profile -f us-east-1 eu-south-2
|
||||
```
|
||||
> By default, `prowler` will scan all AWS regions.
|
||||
|
||||
???+ note
|
||||
By default, `prowler` will scan all AWS regions.
|
||||
|
||||
See more details about AWS Authentication in [Requirements](getting-started/requirements.md)
|
||||
|
||||
@@ -302,3 +274,6 @@ prowler gcp --project-ids <Project ID 1> <Project ID 2> ... <Project ID N>
|
||||
```
|
||||
|
||||
See more details about GCP Authentication in [Requirements](getting-started/requirements.md)
|
||||
|
||||
## Prowler v2 Documentation
|
||||
For **Prowler v2 Documentation**, please check it out [here](https://github.com/prowler-cloud/prowler/blob/8818f47333a0c1c1a457453c87af0ea5b89a385f/README.md).
|
||||
|
||||
@@ -13,9 +13,9 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (F
|
||||
|
||||
## Reporting Vulnerabilities
|
||||
|
||||
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting help@prowler.pro.
|
||||
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the Prowler SaaS service, please submit the information by contacting us via [**support.prowler.com**](http://support.prowler.com).
|
||||
|
||||
The information you share with Verica as part of this process is kept confidential within Verica and the Prowler team. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
|
||||
The information you share with the Prowler team as part of this process is kept confidential within Prowler. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
|
||||
|
||||
We will review the submitted report, and assign it a tracking number. We will then respond to you, acknowledging receipt of the report, and outline the next steps in the process.
|
||||
|
||||
|
||||
@@ -113,7 +113,8 @@ You will need to pass the S3 URI where your Allowlist YAML file was uploaded to
|
||||
```
|
||||
prowler aws -w s3://<bucket>/<prefix>/allowlist.yaml
|
||||
```
|
||||
> Make sure that the used AWS credentials have s3:GetObject permissions in the S3 path where the allowlist file is located.
|
||||
???+ note
|
||||
Make sure that the used AWS credentials have `s3:GetObject` permissions in the S3 path where the allowlist file is located.
|
||||
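As a minimal sketch, an IAM policy statement granting that permission could look like the following (bucket and prefix are placeholders, as above):

```json
{
    "Effect": "Allow",
    "Action": "s3:GetObject",
    "Resource": "arn:aws:s3:::<bucket>/<prefix>/allowlist.yaml"
}
```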
|
||||
### AWS DynamoDB Table ARN
|
||||
|
||||
@@ -138,7 +139,8 @@ The following example will allowlist all resources in all accounts for the EC2 c
|
||||
|
||||
<img src="../img/allowlist-row.png"/>
|
||||
|
||||
> Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table.
|
||||
???+ note
|
||||
Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table.
|
||||
|
||||
### AWS Lambda ARN
|
||||
|
||||
|
||||
@@ -19,9 +19,8 @@ Those credentials must be associated to a user or role with proper permissions t
|
||||
- `arn:aws:iam::aws:policy/SecurityAudit`
|
||||
- `arn:aws:iam::aws:policy/job-function/ViewOnlyAccess`
|
||||
|
||||
> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
|
||||
|
||||
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
|
||||
???+ note
|
||||
Moreover, some additional read-only permissions are needed for several checks, so make sure you also attach the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using. If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
|
||||
|
||||
|
||||
## Profiles
|
||||
|
||||
@@ -32,3 +32,14 @@ Prowler's AWS Provider uses the Boto3 [Standard](https://boto3.amazonaws.com/v1/
|
||||
- Retry attempts on nondescriptive, transient error codes. Specifically, these HTTP status codes: 500, 502, 503, 504.
|
||||
|
||||
- Any retry attempt will include an exponential backoff by a base factor of 2 for a maximum backoff time of 20 seconds.
|
||||
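For example, if large scans in your environment still hit throttling errors, you can raise the retry ceiling from its default of 3 attempts (the value 5 below is just an illustration):

```console
prowler aws --aws-retries-max-attempts 5
```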
|
||||
## Notes for validating retry attempts
|
||||
|
||||
If you are making changes to Prowler and want to validate whether requests are being retried or given up on, you can take the following approach:
|
||||
|
||||
* Run prowler with `--log-level DEBUG` and `--log-file debuglogs.txt`
|
||||
* Search for retry attempts using `grep -i 'Retry needed' debuglogs.txt`
|
||||
|
||||
This is based on the [AWS documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html#checking-retry-attempts-in-your-client-logs), which states that if a retry is performed, you will see a message starting with "Retry needed".
|
||||
|
||||
You can determine the total number of calls made using `grep -i 'Sending http request' debuglogs.txt | wc -l`
|
||||
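As a quick sketch of the full validation workflow (`debuglogs.txt` is just the example file name used above; `grep -c` counts the matching lines):

```console
prowler aws --log-level DEBUG --log-file debuglogs.txt
grep -ci 'Retry needed' debuglogs.txt
grep -ci 'Sending http request' debuglogs.txt
```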
|
||||
@@ -1,26 +1,26 @@
|
||||
# AWS CloudShell
|
||||
|
||||
Prowler can be easily executed in AWS CloudShell but it has some prerequisites to be able to do so. AWS CloudShell is a container running with `Amazon Linux release 2 (Karoo)` that comes with Python 3.7; since Prowler requires Python >= 3.9 we need to first install a newer version of Python. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
|
||||
|
||||
- First install all the dependencies and then Python; in this case we need to compile it because no package is available at the time of writing:
|
||||
```
|
||||
sudo yum -y install gcc openssl-devel bzip2-devel libffi-devel
|
||||
wget https://www.python.org/ftp/python/3.9.16/Python-3.9.16.tgz
|
||||
tar zxf Python-3.9.16.tgz
|
||||
cd Python-3.9.16/
|
||||
./configure --enable-optimizations
|
||||
sudo make altinstall
|
||||
python3.9 --version
|
||||
cd
|
||||
```
|
||||
- Once Python 3.9 is available we can install Prowler from pip:
|
||||
```
|
||||
pip3.9 install prowler
|
||||
```
|
||||
- Now enjoy Prowler:
|
||||
```
|
||||
## Installation
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
|
||||
```shell
|
||||
pip install prowler
|
||||
prowler -v
|
||||
prowler
|
||||
```
|
||||
|
||||
- To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
|
||||
## Download Files
|
||||
|
||||
To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
|
||||
|
||||
## Clone Prowler from GitHub
|
||||
|
||||
The limited storage that AWS CloudShell provides for the user's home directory causes issues when installing the poetry dependencies to run Prowler from GitHub. Here is a workaround:
|
||||
```shell
|
||||
git clone https://github.com/prowler-cloud/prowler.git
|
||||
cd prowler
|
||||
pip install poetry
|
||||
mkdir /tmp/pypoetry
|
||||
poetry config cache-dir /tmp/pypoetry
|
||||
poetry shell
|
||||
poetry install
|
||||
python prowler.py -v
|
||||
```
|
||||
|
||||
BIN
docs/tutorials/aws/img/enable-2.png
Normal file
|
After Width: | Height: | Size: 341 KiB |
BIN
docs/tutorials/aws/img/enable-partner-integration-2.png
Normal file
|
After Width: | Height: | Size: 291 KiB |
BIN
docs/tutorials/aws/img/enable-partner-integration-3.png
Normal file
|
After Width: | Height: | Size: 306 KiB |
BIN
docs/tutorials/aws/img/enable-partner-integration-4.png
Normal file
|
After Width: | Height: | Size: 346 KiB |
BIN
docs/tutorials/aws/img/enable-partner-integration.png
Normal file
|
After Width: | Height: | Size: 293 KiB |
BIN
docs/tutorials/aws/img/enable.png
Normal file
|
After Width: | Height: | Size: 252 KiB |
BIN
docs/tutorials/aws/img/finding-details.png
Normal file
|
After Width: | Height: | Size: 603 KiB |
BIN
docs/tutorials/aws/img/findings.png
Normal file
|
After Width: | Height: | Size: 273 KiB |
@@ -1,21 +1,28 @@
|
||||
# AWS Organizations
|
||||
|
||||
## Get AWS Account details from your AWS Organization
|
||||
|
||||
Prowler allows you to get additional information of the scanned account in CSV and JSON outputs. When scanning a single account you get the Account ID as part of the output.
|
||||
Prowler allows you to get additional information of the scanned account from AWS Organizations.
|
||||
|
||||
If you have AWS Organizations Prowler can get your account details like Account Name, Email, ARN, Organization ID and Tags and you will have them next to every finding in the CSV and JSON outputs.
|
||||
If you have AWS Organizations enabled, Prowler can get your account details like account name, email, ARN, organization ID and tags, and you will have them next to every finding's output.
|
||||
|
||||
In order to do that you can use the option `-O`/`--organizations-role <organizations_role_arn>`. See the following sample command:
|
||||
In order to do that you can use the argument `-O`/`--organizations-role <organizations_role_arn>`. If this argument is not present, Prowler will try to fetch that information automatically if the AWS account is a delegated administrator for the AWS Organization.
|
||||
|
||||
???+ note
|
||||
Refer [here](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_delegate_policies.html) for more information about AWS Organizations delegated administrator.
|
||||
|
||||
See the following sample command:
|
||||
|
||||
```shell
|
||||
prowler aws \
|
||||
-O arn:aws:iam::<management_organizations_account_id>:role/<role_name>
|
||||
```
|
||||
> Make sure the role in your AWS Organizations management account has the permissions `organizations:ListAccounts*` and `organizations:ListTagsForResource`.
|
||||
???+ note
|
||||
Make sure the role in your AWS Organizations management account has the permissions `organizations:DescribeAccount` and `organizations:ListTagsForResource`.
|
||||
|
||||
In that command Prowler will scan the account, get the account details from the AWS Organizations management account by assuming a role, and create two reports with those details in JSON and CSV.
|
||||
Prowler will scan the AWS account and get the account details from AWS Organizations.
|
||||
|
||||
In the JSON output below (redacted) you can see tags coded in base64 to prevent breaking CSV or JSON due to its format:
|
||||
In the JSON output below you can see tags coded in base64 to prevent breaking CSV or JSON due to its format:
|
||||
|
||||
```json
|
||||
"Account Email": "my-prod-account@domain.com",
|
||||
@@ -25,13 +32,15 @@ In the JSON output below (redacted) you can see tags coded in base64 to prevent
|
||||
"Account tags": "\"eyJUYWdzIjpasf0=\""
|
||||
```
|
||||
|
||||
The additional fields in CSV header output are as follow:
|
||||
The additional fields in CSV header output are as follows:
|
||||
|
||||
```csv
|
||||
ACCOUNT_DETAILS_EMAIL,ACCOUNT_DETAILS_NAME,ACCOUNT_DETAILS_ARN,ACCOUNT_DETAILS_ORG,ACCOUNT_DETAILS_TAGS
|
||||
```
|
||||
- ACCOUNT_DETAILS_EMAIL
|
||||
- ACCOUNT_DETAILS_NAME
|
||||
- ACCOUNT_DETAILS_ARN
|
||||
- ACCOUNT_DETAILS_ORG
|
||||
- ACCOUNT_DETAILS_TAGS
|
||||
|
||||
## Extra: run Prowler across all accounts in AWS Organizations by assuming roles
|
||||
## Extra: Run Prowler across all accounts in AWS Organizations by assuming roles
|
||||
|
||||
If you want to run Prowler across all accounts of AWS Organizations you can do this:
|
||||
|
||||
@@ -55,4 +64,6 @@ If you want to run Prowler across all accounts of AWS Organizations you can do t
|
||||
done
|
||||
```
|
||||
|
||||
> Using the same for loop it can be scanned a list of accounts with a variable like `ACCOUNTS_LIST='11111111111 2222222222 333333333'`
|
||||
???+ note
|
||||
Using the same for loop, a list of accounts can be scanned with a variable like:
|
||||
<br/>`ACCOUNTS_LIST='11111111111 2222222222 333333333'`
|
||||
|
||||
@@ -6,10 +6,13 @@ By default Prowler is able to scan the following AWS partitions:
|
||||
- China: `aws-cn`
|
||||
- GovCloud (US): `aws-us-gov`
|
||||
|
||||
> To check the available regions for each partition and service please refer to the following document [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json)
|
||||
???+ note
|
||||
To check the available regions for each partition and service please refer to the following document [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json)
|
||||
|
||||
Take into account that to scan the China (`aws-cn`) or GovCloud (`aws-us-gov`) partitions you are either required to have a valid region for that partition in your AWS credentials or to specify the regions you want to audit for that partition using the `-f/--region` flag.
|
||||
> Please, refer to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials for more information about the AWS credentials configuration.
|
||||
|
||||
???+ note
|
||||
Please refer to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials for more information about the AWS credentials configuration.
|
||||
|
||||
Prowler can scan specific region(s) with:
|
||||
```console
|
||||
@@ -34,7 +37,8 @@ aws_access_key_id = XXXXXXXXXXXXXXXXXXX
|
||||
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
|
||||
region = cn-north-1
|
||||
```
|
||||
> With this option all the partition regions will be scanned without the need of use the `-f/--region` flag
|
||||
???+ note
|
||||
With this option all the partition regions will be scanned without the need to use the `-f/--region` flag.
|
||||
|
||||
|
||||
## AWS GovCloud (US)
|
||||
@@ -52,7 +56,8 @@ aws_access_key_id = XXXXXXXXXXXXXXXXXXX
|
||||
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
|
||||
region = us-gov-east-1
|
||||
```
|
||||
> With this option all the partition regions will be scanned without the need of use the `-f/--region` flag
|
||||
???+ note
|
||||
With this option all the partition regions will be scanned without the need to use the `-f/--region` flag.
|
||||
|
||||
|
||||
## AWS ISO (US & Europe)
|
||||
|
||||
@@ -23,13 +23,22 @@ prowler aws -R arn:aws:iam::<account_id>:role/<role_name>
|
||||
prowler aws -T/--session-duration <seconds> -I/--external-id <external_id> -R arn:aws:iam::<account_id>:role/<role_name>
|
||||
```
|
||||
|
||||
## Custom Role Session Name
|
||||
|
||||
Prowler can use your custom Role Session name with:
|
||||
```console
|
||||
prowler aws --role-session-name <role_session_name>
|
||||
```
|
||||
|
||||
???+ note
|
||||
It defaults to `ProwlerAssessmentSession`.
|
||||
|
||||
## STS Endpoint Region
|
||||
|
||||
If you are using Prowler in AWS regions that are not enabled by default you need to use the argument `--sts-endpoint-region` to point the AWS STS API calls `assume-role` and `get-caller-identity` to the non-default region, e.g.: `prowler aws --sts-endpoint-region eu-south-2`.
|
||||
|
||||
> Since v3.11.0, Prowler uses a regional token in STS sessions so it can scan all AWS regions without needing the `--sts-endpoint-region` argument.
|
||||
|
||||
> Make sure that you have enabled the AWS Region you want to scan in BOTH AWS Accounts (assumed role account and account from which you assume the role).
|
||||
???+ note
|
||||
Since v3.11.0, Prowler uses a regional token in STS sessions so it can scan all AWS regions without needing the `--sts-endpoint-region` argument. Make sure that you have enabled the AWS Region you want to scan in **BOTH** AWS Accounts (assumed role account and account from which you assume the role).
|
||||
|
||||
## Role MFA
|
||||
|
||||
@@ -42,6 +51,7 @@ If your IAM Role has MFA configured you can use `--mfa` along with `-R`/`--role
|
||||
|
||||
To create a role to be assumed in one or multiple accounts, you can use the following [template](https://github.com/prowler-cloud/prowler/blob/master/permissions/create_role_to_assume_cfn.yaml) either as a CloudFormation Stack or StackSet and adapt it.
|
||||
|
||||
> _NOTE 1 about Session Duration_: Depending on the amount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
|
||||
???+ note "About Session Duration"
|
||||
Depending on the number of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
|
||||
|
||||
> _NOTE 2 about Session Duration_: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html).
|
||||
Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour, so consider not using role chaining if possible; read more about that in footnote 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html).
|
||||
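For instance, to allow the maximum 12-hour session when assuming a role (placeholders as in the examples above):

```console
prowler aws -R arn:aws:iam::<account_id>:role/<role_name> -T 43200
```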
|
||||
@@ -21,6 +21,5 @@ By default Prowler sends HTML, JSON and CSV output formats, if you want to send
|
||||
prowler <provider> -M csv -B my-bucket
|
||||
```
|
||||
|
||||
> In the case you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`.
|
||||
|
||||
> Make sure that the used credentials have `s3:PutObject` permissions in the S3 path where the reports are going to be uploaded.
|
||||
???+ note
|
||||
If you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`. Make sure that the used credentials have `s3:PutObject` permissions in the S3 path where the reports are going to be uploaded.
|
||||
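For example, uploading with the assumed role credentials versus the initial credentials:

```console
prowler aws -M csv -B my-bucket
prowler aws -M csv -D my-bucket
```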
|
||||
@@ -1,61 +1,137 @@
|
||||
# AWS Security Hub Integration
|
||||
|
||||
Prowler supports natively and as **official integration** sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub.
|
||||
Prowler supports natively and as **official integration** sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows **Prowler** to import its findings to AWS Security Hub.
|
||||
|
||||
With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and from Prowler for free.
|
||||
|
||||
Before sending findings to AWS Security Hub, you will need to perform the next steps:
|
||||
Before sending findings, you will need to enable AWS Security Hub and the **Prowler** integration.
|
||||
|
||||
1. Since Security Hub is a region-based service, enable it in the region or regions you require, either from the AWS Management Console or using the AWS CLI with this command if you have enough permissions:
|
||||
- `aws securityhub enable-security-hub --region <region>`.
|
||||
2. Enable Prowler as a partner integration, either from the AWS Management Console or using the AWS CLI with this command if you have enough permissions:
|
||||
- `aws securityhub enable-import-findings-for-product --region <region> --product-arn arn:aws:securityhub:<region>::product/prowler/prowler` (change region also inside the ARN).
|
||||
- Using the AWS Management Console:
|
||||

|
||||
3. Allow Prowler to import its findings to AWS Security Hub by adding the policy below to the role or user running Prowler:
|
||||
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
|
||||
## Enable AWS Security Hub
|
||||
|
||||
To enable the integration, you have to perform the following steps in _at least_ one AWS region of a given AWS account to turn on **AWS Security Hub** and **Prowler** as a partner integration.
|
||||
|
||||
Since **AWS Security Hub** is a region-based service, you will need to enable it in the region or regions you require. You can configure it using the AWS Management Console or the AWS CLI.
|
||||
|
||||
???+ note
|
||||
Take into account that enabling this integration will incur costs in AWS Security Hub; please refer to its pricing [here](https://aws.amazon.com/security-hub/pricing/) for more information.
|
||||
|
||||
### Using the AWS Management Console
|
||||
|
||||
#### Enable AWS Security Hub
|
||||
|
||||
If you currently have AWS Security Hub enabled you can skip to the [next section](#enable-prowler-integration).
|
||||
|
||||
1. Open the **AWS Security Hub** console at https://console.aws.amazon.com/securityhub/.
|
||||
|
||||
2. When you open the Security Hub console for the first time make sure that you are in the region you want to enable, then choose **Go to Security Hub**.
|
||||

|
||||
|
||||
3. On the next page, the Security standards section lists the security standards that Security Hub supports. Select the check box for a standard to enable it, and clear the check box to disable it.
|
||||
|
||||
4. Choose **Enable Security Hub**.
|
||||

|
||||
|
||||
#### Enable Prowler Integration
|
||||
|
||||
If you currently have the Prowler integration enabled in AWS Security Hub you can skip to the [next section](#send-findings) and start sending findings.
|
||||
|
||||
Once **AWS Security Hub** is enabled you will need to enable **Prowler** as a partner integration to allow **Prowler** to send findings to your **AWS Security Hub**.
|
||||
|
||||
1. Open the **AWS Security Hub** console at https://console.aws.amazon.com/securityhub/.
|
||||
|
||||
2. Select the **Integrations** tab in the right-side menu bar.
|
||||

|
||||
|
||||
3. Search for _Prowler_ in the text search box and the **Prowler** integration will appear.
|
||||
|
||||
4. Once there, click on **Accept Findings** to allow **AWS Security Hub** to receive findings from **Prowler**.
|
||||

|
||||
|
||||
5. A new modal will appear to confirm that you are enabling the **Prowler** integration.
|
||||

|
||||
|
||||
6. Right after clicking on **Accept Findings**, you will see that the integration is enabled in **AWS Security Hub**.
|
||||

|
||||
|
||||
### Using the AWS CLI
|
||||
|
||||
To enable **AWS Security Hub** and the **Prowler** integration you have to run the following commands using the AWS CLI:
|
||||
|
||||
```shell
|
||||
aws securityhub enable-security-hub --region <region>
|
||||
```
|
||||
???+ note
|
||||
For this command to work you will need the `securityhub:EnableSecurityHub` permission. You will need to set the AWS region where you want to enable AWS Security Hub.
|
||||
|
||||
Once **AWS Security Hub** is enabled you will need to enable **Prowler** as a partner integration to allow **Prowler** to send findings to your AWS Security Hub. You have to run the following commands using the AWS CLI:
|
||||
|
||||
```shell
|
||||
aws securityhub enable-import-findings-for-product --region <region> --product-arn arn:aws:securityhub:<region>::product/prowler/prowler
|
||||
```
|
||||
???+ note
|
||||
You will need to set the AWS region where you want to enable the integration, and also set the region within the ARN. For this command to work you will need the `securityhub:EnableImportFindingsForProduct` permission.
|
||||
|
||||
|
||||
## Send Findings
|
||||
Once it is enabled, it is as simple as running the command below (for all regions):
|
||||
|
||||
```sh
|
||||
prowler aws -S
|
||||
prowler aws --security-hub
|
||||
```
|
||||
|
||||
or for only one filtered region like eu-west-1:
|
||||
|
||||
```sh
|
||||
prowler -S -f eu-west-1
|
||||
prowler --security-hub --region eu-west-1
|
||||
```
|
||||
|
||||
> **Note 1**: It is recommended to send only fails to Security Hub and that is possible adding `-q` to the command.
|
||||
???+ note
|
||||
It is recommended to send only fails to Security Hub, which is possible by adding `-q/--quiet` to the command. Instead of the `-q/--quiet` argument, you can use the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but only send FAIL findings to AWS Security Hub.
|
||||
|
||||
> **Note 2**: Since Prowler perform checks to all regions by default you may need to filter by region when running Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings in all regions hubs). Prowler will send findings to the Security Hub on the region where the scanned resource is located.
|
||||
Since Prowler performs checks in all regions by default, you may need to filter by region when running the Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f/--region <region>` (if no region is given it will try to push findings to the hubs in all regions). Prowler will send findings to the Security Hub in the region where the scanned resource is located.
|
||||
|
||||
> **Note 3**: To have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours.
|
||||
To have updated findings in Security Hub you have to run Prowler periodically, e.g. once a day or every certain amount of hours.
|
||||
|
||||
Once you run Prowler for the first time you will be able to see its findings in the Findings section:
|
||||
### See your Prowler findings in AWS Security Hub
|
||||
|
||||

|
||||
Once **AWS Security Hub** is configured, in your next scan you will receive the **Prowler** findings in the configured AWS regions. To review those findings in **AWS Security Hub**:
|
||||
|
||||
1. Open the **AWS Security Hub** console at https://console.aws.amazon.com/securityhub/.
|
||||
|
||||
2. Select the **Findings** tab in the right-side menu bar.
|
||||

|
||||
|
||||
3. Use the search box filters and use the **Product Name** filter with the value _Prowler_ to see the findings sent from **Prowler**.
|
||||
|
||||
4. Then, you can click on the check **Title** to see the details and the history of a finding.
|
||||

|
||||
|
||||
As you can see in the related requirements section, in the detailed view of the findings, **Prowler** also sends compliance information related to every finding.
|
||||
|
||||
## Send findings to Security Hub assuming an IAM Role
|
||||
|
||||
When you are auditing a multi-account AWS environment, you can send findings to a Security Hub of another account by assuming an IAM role from that account using the `-R` flag in the Prowler command:
|
||||
|
||||
```sh
|
||||
prowler -S -R arn:aws:iam::123456789012:role/ProwlerExecRole
|
||||
prowler --security-hub --role arn:aws:iam::123456789012:role/ProwlerExecutionRole
|
||||
```
|
||||
|
||||
> Remember that the used role needs to have permissions to send findings to Security Hub. To get more information about the permissions required, please refer to the following IAM policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
|
||||
???+ note
|
||||
Remember that the used role needs to have permissions to send findings to Security Hub. To get more information about the permissions required, please refer to the following IAM policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
|
||||
|
||||
|
||||
## Send only failed findings to Security Hub
|
||||
|
||||
When using Security Hub it is recommended to send only the failed findings generated. To follow that recommendation you could add the `-q` flag to the Prowler command:
|
||||
When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**, which eventually lowers the **AWS Security Hub** usage costs. To follow that recommendation you can add the `-q/--quiet` flag to the Prowler command:
|
||||
|
||||
```sh
|
||||
prowler -S -q
|
||||
prowler --security-hub --quiet
|
||||
```
|
||||
|
||||
Instead of the `-q/--quiet` argument, you can use the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but only send FAIL findings to AWS Security Hub:
|
||||
|
||||
```sh
|
||||
prowler --security-hub --send-sh-only-fails
|
||||
```
|
||||
|
||||
## Skip sending updates of findings to Security Hub
|
||||
|
||||
@@ -63,5 +139,5 @@ By default, Prowler archives all its findings in Security Hub that have not appe
|
||||
You can skip this logic by using the option `--skip-sh-update` so Prowler will not archive older findings:
|
||||
|
||||
```sh
|
||||
prowler -S --skip-sh-update
|
||||
prowler --security-hub --skip-sh-update
|
||||
```
|
||||
|
||||
16
docs/tutorials/azure/use-non-default-cloud.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# Use non-default Azure regions
|
||||
|
||||
Microsoft provides clouds for compliance with regional laws, which are available for your use.
|
||||
By default, Prowler uses the `AzureCloud` cloud, which is the commercial one (you can list all the available clouds with `az cloud list --output table`).
|
||||
|
||||
At the time of writing this documentation, the available Azure clouds for the different regions are the following:
|
||||
- AzureCloud
|
||||
- AzureChinaCloud
|
||||
- AzureUSGovernment
|
||||
- AzureGermanCloud
|
||||
|
||||
If you want to change the default one you must include the flag `--azure-region`, e.g.:
|
||||
|
||||
```console
|
||||
prowler azure --az-cli-auth --azure-region AzureChinaCloud
|
||||
```
|
||||
@@ -1,19 +1,19 @@
|
||||
# Check Aliases
|
||||
|
||||
Prowler allows you to use aliases for the checks. You only have to add the `CheckAliases` key to the check's metadata with a list of the aliases:
|
||||
|
||||
"Provider": "<provider>",
|
||||
"CheckID": "<check_id>",
|
||||
"CheckTitle": "<check_title>",
|
||||
"CheckAliases": [
|
||||
"<check_alias_1>"
|
||||
"<check_alias_2>",
|
||||
...
|
||||
],
|
||||
...
|
||||
|
||||
```json title="check.metadata.json"
|
||||
"Provider": "<provider>",
|
||||
"CheckID": "<check_id>",
|
||||
"CheckTitle": "<check_title>",
|
||||
"CheckAliases": [
|
||||
"<check_alias_1>"
|
||||
"<check_alias_2>",
|
||||
...
|
||||
],
|
||||
...
|
||||
```
|
||||
Then, you can execute the check either with its check ID or with one of the previous aliases:
|
||||
```console
|
||||
```shell
|
||||
prowler <provider> -c/--checks <check_alias_1>
|
||||
|
||||
Using alias <check_alias_1> for check <check_id>...
|
||||
|
||||
@@ -8,29 +8,36 @@ prowler <provider> --list-compliance
|
||||
```
|
||||
Currently, the available frameworks are:
|
||||
|
||||
- `cis_1.4_aws`
|
||||
- `cis_1.5_aws`
|
||||
- `ens_rd2022_aws`
|
||||
- `aws_account_security_onboarding_aws`
|
||||
- `aws_audit_manager_control_tower_guardrails_aws`
|
||||
- `aws_foundational_security_best_practices_aws`
|
||||
- `aws_well_architected_framework_reliability_pillar_aws`
|
||||
- `aws_well_architected_framework_security_pillar_aws`
|
||||
- `cis_1.4_aws`
|
||||
- `cis_1.5_aws`
|
||||
- `cis_2.0_aws`
|
||||
- `cis_2.0_gcp`
|
||||
- `cis_3.0_aws`
|
||||
- `cisa_aws`
|
||||
- `ens_rd2022_aws`
|
||||
- `fedramp_low_revision_4_aws`
|
||||
- `fedramp_moderate_revision_4_aws`
|
||||
- `ffiec_aws`
|
||||
- `foundational_technical_review_aws`
|
||||
- `gdpr_aws`
|
||||
- `gxp_eu_annex_11_aws`
|
||||
- `gxp_21_cfr_part_11_aws`
|
||||
- `gxp_eu_annex_11_aws`
|
||||
- `hipaa_aws`
|
||||
- `iso27001_2013_aws`
|
||||
- `mitre_attack_aws`
|
||||
- `nist_800_171_revision_2_aws`
|
||||
- `nist_800_53_revision_4_aws`
|
||||
- `nist_800_53_revision_5_aws`
|
||||
- `nist_800_171_revision_2_aws`
|
||||
- `nist_csf_1.1_aws`
|
||||
- `pci_3.2.1_aws`
|
||||
- `rbi_cyber_security_framework_aws`
|
||||
- `soc2_aws`
|
||||
|
||||
|
||||
## List Requirements of Compliance Frameworks
|
||||
For each compliance framework, you can use option `--list-compliance-requirements` to list its requirements:
|
||||
```sh
|
||||
|
||||
@@ -37,13 +37,24 @@ The following list includes all the AWS checks with configurable variables that
|
||||
## Azure
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Azure checks with configurable variables that can be changed in the configuration yaml file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
|
||||
| `network_public_ip_shodan` | `shodan_api_key` | String |
|
||||
| `app_ensure_php_version_is_latest` | `php_latest_version` | String |
|
||||
| `app_ensure_python_version_is_latest` | `python_latest_version` | String |
|
||||
| `app_ensure_java_version_is_latest` | `java_latest_version` | String |
|
||||
|
||||
|
||||
## GCP
|
||||
|
||||
### Configurable Checks
|
||||
|
||||
## Config YAML File Structure
|
||||
> This is the new Prowler configuration file format. The old one without provider keys is still compatible just for the AWS provider.
|
||||
|
||||
???+ note
|
||||
This is the new Prowler configuration file format. The old one without provider keys is still compatible just for the AWS provider.
|
||||
|
||||
```yaml title="config.yaml"
|
||||
# AWS Configuration
|
||||
@@ -126,8 +137,22 @@ aws:
|
||||
|
||||
# Azure Configuration
|
||||
azure:
|
||||
# Azure Network Configuration
|
||||
# azure.network_public_ip_shodan
|
||||
shodan_api_key: null
|
||||
|
||||
# Azure App Configuration
|
||||
# azure.app_ensure_php_version_is_latest
|
||||
php_latest_version: "8.2"
|
||||
# azure.app_ensure_python_version_is_latest
|
||||
python_latest_version: "3.12"
|
||||
# azure.app_ensure_java_version_is_latest
|
||||
java_latest_version: "17"
|
||||
|
||||
# GCP Configuration
|
||||
gcp:
|
||||
# GCP Compute Configuration
|
||||
# gcp.compute_public_address_shodan
|
||||
shodan_api_key: null
|
||||
|
||||
```
|
||||
|
||||
43
docs/tutorials/custom-checks-metadata.md
Normal file
@@ -0,0 +1,43 @@
|
||||
# Custom Checks Metadata
|
||||
|
||||
In certain organizations, the severity of specific checks might differ from the default values defined in the check's metadata. For instance, while `s3_bucket_level_public_access_block` could be deemed `critical` for some organizations, others might assign a different severity level.
|
||||
|
||||
The custom metadata option offers a means to override the default metadata set by Prowler.
|
||||
|
||||
You can utilize `--custom-checks-metadata-file` followed by the path to your custom checks metadata YAML file.
|
||||
|
||||
## Available Fields
|
||||
|
||||
The supported check metadata fields that can be overridden are listed as follows:
|
||||
|
||||
- Severity
|
||||
|
||||
## File Syntax
|
||||
|
||||
This feature is available for all the providers supported in Prowler since the metadata format is common between all the providers. The following is the YAML format for the custom checks metadata file:
|
||||
```yaml title="custom_checks_metadata.yaml"
|
||||
CustomChecksMetadata:
|
||||
aws:
|
||||
Checks:
|
||||
s3_bucket_level_public_access_block:
|
||||
Severity: high
|
||||
s3_bucket_no_mfa_delete:
|
||||
Severity: high
|
||||
azure:
|
||||
Checks:
|
||||
storage_infrastructure_encryption_is_enabled:
|
||||
Severity: medium
|
||||
gcp:
|
||||
Checks:
|
||||
compute_instance_public_ip:
|
||||
Severity: critical
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Executing the following command will assess all checks and generate a report while overriding the metadata for those checks:
|
||||
```sh
|
||||
prowler <provider> --custom-checks-metadata-file <path/to/custom/metadata>
|
||||
```
|
||||
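For instance, using the sample file above (the file name is just the one from the snippet title):

```sh
prowler aws --custom-checks-metadata-file ./custom_checks_metadata.yaml
```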
|
||||
This customization feature enables organizations to tailor the severity of specific checks based on their unique requirements, providing greater flexibility in security assessment and reporting.
|
||||
@@ -13,7 +13,8 @@ Otherwise, you can generate and download Service Account keys in JSON format (re
|
||||
prowler gcp --credentials-file path
|
||||
```
|
||||
|
||||
> `prowler` will scan the GCP project associated with the credentials.
|
||||
???+ note
|
||||
`prowler` will scan the GCP project associated with the credentials.
|
||||
|
||||
|
||||
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
|
||||
@@ -22,8 +23,4 @@ Prowler will follow the same credentials search as [Google authentication librar
|
||||
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
|
||||
|
||||
- Viewer
|
||||
- Security Reviewer
|
||||
- Stackdriver Account Viewer
|
||||
Those credentials must be associated with a user or service account with proper permissions to perform all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
|
||||
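As a sketch, granting that role with the gcloud CLI could look like this (the project ID and service account email are placeholders):

```console
gcloud projects add-iam-policy-binding <project_id> \
    --member="serviceAccount:<service_account_email>" \
    --role="roles/viewer"
```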
|
||||
@@ -1,6 +1,7 @@
|
||||
# Ignore Unused Services
|
||||
|
||||
> Currently only available on the AWS provider.
|
||||
???+ note
|
||||
Currently only available on the AWS provider.
|
||||
|
||||
Prowler allows you to ignore findings from unused services, so you can reduce the number of findings in Prowler's reports.
|
||||
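For example:

```console
prowler aws --ignore-unused-services
```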
|
||||
@@ -47,7 +48,7 @@ It is a best practice to encrypt both metadata and connection passwords in AWS G
|
||||
#### Inspector
|
||||
Amazon Inspector is a vulnerability discovery service that automates continuous scanning for security vulnerabilities within your Amazon EC2, Amazon ECR, and AWS Lambda environments. Prowler recommends enabling it and resolving all of Inspector's findings. When ignoring unused services, Prowler will only notify you if there are any Lambda functions, EC2 instances or ECR repositories in the region where Amazon Inspector should be enabled.
|
||||
|
||||
- `inspector2_findings_exist`
|
||||
- `inspector2_is_enabled`
|
||||
|
||||
#### Macie
|
||||
Amazon Macie is a security service that uses machine learning to automatically discover, classify and protect sensitive data in S3 buckets. Prowler will only create a finding when Macie is not enabled if there are S3 buckets in your account.
|
||||
|
||||
@@ -10,7 +10,9 @@ prowler <provider> --slack
|
||||
|
||||

|
||||
|
||||
> Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables.
|
||||
???+ note
|
||||
Slack integration needs the `SLACK_API_TOKEN` and `SLACK_CHANNEL_ID` environment variables.
|
||||
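A minimal sketch of setting those variables before running the integration (the token and channel values are placeholders):

```console
export SLACK_API_TOKEN=<your_slack_api_token>
export SLACK_CHANNEL_ID=<your_slack_channel_id>
prowler <provider> --slack
```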
|
||||
### Configuration
|
||||
|
||||
To configure the Slack Integration, follow these steps:
|
||||
|
||||
@@ -18,7 +18,8 @@ You can establish the log level of Prowler with `--log-level` option:
|
||||
prowler <provider> --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}
|
||||
```
|
||||
|
||||
> By default, Prowler will run with the `CRITICAL` log level, since critical errors will abort the execution.
|
||||
???+ note
|
||||
By default, Prowler will run with the `CRITICAL` log level, since critical errors will abort the execution.
|
||||
|
||||
## Export Logs to File
|
||||
|
||||
@@ -45,4 +46,5 @@ An example of a log file will be the following:
|
||||
"message": "eu-west-2 -- ClientError[124]: An error occurred (UnauthorizedOperation) when calling the DescribeNetworkAcls operation: You are not authorized to perform this operation."
|
||||
}
|
||||
|
||||
> NOTE: Each finding is represented as a `json` object.
|
||||
???+ note
|
||||
Each finding is represented as a `json` object.
|
||||
|
||||
@@ -61,21 +61,26 @@ Prowler allows you to include your custom checks with the flag:
|
||||
```console
|
||||
prowler <provider> -x/--checks-folder <custom_checks_folder>
|
||||
```
|
||||
> S3 URIs are also supported as folders for custom checks, e.g. s3://bucket/prefix/checks_folder/. Make sure that the used credentials have s3:GetObject permissions in the S3 path where the custom checks are located.
|
||||
|
||||
???+ note
|
||||
S3 URIs are also supported as folders for custom checks, e.g. `s3://bucket/prefix/checks_folder/`. Make sure that the used credentials have `s3:GetObject` permissions in the S3 path where the custom checks are located.
|
||||
|
||||
The custom checks folder must contain one subfolder per check; each subfolder must be named after the check and must contain:
|
||||
|
||||
- An empty `__init__.py`: to make Python treat this check folder as a package.
|
||||
- A `check_name.py` containing the check's logic.
|
||||
- A `check_name.metadata.json` containing the check's metadata.
|
||||
>The check name must start with the service name followed by an underscore (e.g., ec2_instance_public_ip).
|
||||
|
||||
???+ note
|
||||
The check name must start with the service name followed by an underscore (e.g., ec2_instance_public_ip).
|
||||
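For example, using the check name from the note above, the folder layout would look like this:

```
<custom_checks_folder>/
└── ec2_instance_public_ip/
    ├── __init__.py
    ├── ec2_instance_public_ip.py
    └── ec2_instance_public_ip.metadata.json
```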
|
||||
To see more information about how to write checks see the [Developer Guide](../developer-guide/checks.md#create-a-new-check-for-a-provider).
|
||||
|
||||
> If you want to run ONLY your custom check(s), import it with -x (--checks-folder) and then run it with -c (--checks), e.g.:
|
||||
```console
|
||||
prowler aws -x s3://bucket/prowler/providers/aws/services/s3/s3_bucket_policy/ -c s3_bucket_policy
|
||||
```
|
||||
???+ note
|
||||
If you want to run ONLY your custom check(s), import it with -x (--checks-folder) and then run it with -c (--checks), e.g.:
|
||||
```console
|
||||
prowler aws -x s3://bucket/prowler/providers/aws/services/s3/s3_bucket_policy/ -c s3_bucket_policy
|
||||
```
|
||||
|
||||
## Severities
|
||||
Each of Prowler's checks has a severity, which can be:
|
||||
|
||||
188
docs/tutorials/parallel-execution.md
Normal file
@@ -0,0 +1,188 @@
|
||||
# Parallel Execution
|
||||
|
||||
The strategy used here will be to execute Prowler once per service. You can modify this approach as per your requirements.
|
||||
|
||||
This can help for really large accounts, but please be aware of AWS API rate limits:
|
||||
|
||||
1. **Service-Specific Limits**: Each AWS service has its own rate limits. For instance, Amazon EC2 might have different rate limits for launching instances versus making API calls to describe instances.
|
||||
2. **API Rate Limits**: Most of the rate limits in AWS are applied at the API level. Each API call to an AWS service counts towards the rate limit for that service.
|
||||
3. **Throttling Responses**: When you exceed the rate limit for a service, AWS responds with a throttling error. In AWS SDKs, these are typically represented as `ThrottlingException` or `RateLimitExceeded` errors.
|
||||
|
||||
For information on Prowler's retrier configuration please refer to this [page](https://docs.prowler.cloud/en/latest/tutorials/aws/boto3-configuration/).
|
||||
|
||||
???+ note
|
||||
You might need to increase the `--aws-retries-max-attempts` parameter from the default value of 3. The retrier follows an exponential backoff strategy.
|
||||
|
||||
## Linux
|
||||
|
||||
Generate a list of services that Prowler supports, and populate this info into a file:
|
||||
|
||||
```bash
|
||||
prowler aws --list-services | awk -F"- " '{print $2}' | sed '/^$/d' > services
|
||||
```
|
||||
|
||||
To skip scanning certain services, remove them from this file.
|
||||
|
||||
Then create a new Bash script file `parallel-prowler.sh` and add the following contents. Update the `profile` variable to the AWS CLI profile you want to run Prowler with.
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
|
||||
# Change these variables as needed
|
||||
profile="your_profile"
|
||||
account_id=$(aws sts get-caller-identity --profile "${profile}" --query 'Account' --output text)
|
||||
|
||||
echo "Executing in account: ${account_id}"
|
||||
|
||||
# Maximum number of concurrent processes
|
||||
MAX_PROCESSES=5
|
||||
|
||||
# Loop through the services
|
||||
while read service; do
|
||||
echo "$(date '+%Y-%m-%d %H:%M:%S'): Starting job for service: ${service}"
|
||||
|
||||
# Run the command in the background
|
||||
(prowler -p "$profile" -s "$service" -F "${account_id}-${service}" --ignore-unused-services --only-logs; echo "$(date '+%Y-%m-%d %H:%M:%S') - ${service} has completed") &
|
||||
|
||||
# Check if we have reached the maximum number of processes
|
||||
while [ $(jobs -r | wc -l) -ge ${MAX_PROCESSES} ]; do
|
||||
# Wait for a second before checking again
|
||||
sleep 1
|
||||
done
|
||||
done < ./services
|
||||
|
||||
# Wait for all background processes to finish
|
||||
wait
|
||||
echo "All jobs completed"
|
||||
```
|
||||
|
||||
Output will be stored in the `output/` folder that is in the same directory from which you executed the script.
|
||||
|
||||
## Windows
|
||||
|
||||
Generate a list of services that Prowler supports, and populate this info into a file:
|
||||
|
||||
```powershell
|
||||
prowler aws --list-services | ForEach-Object {
|
||||
# Capture lines that are likely service names
|
||||
if ($_ -match '^\- \w+$') {
|
||||
$_.Trim().Substring(2)
|
||||
}
|
||||
} | Where-Object {
|
||||
# Filter out empty or null lines
|
||||
$_ -ne $null -and $_ -ne ''
|
||||
} | Set-Content -Path "services"
|
||||
```
|
||||
|
||||
To skip scanning certain services, remove them from this file.
|
||||
|
||||
Then create a new PowerShell script file `parallel-prowler.ps1` and add the following contents. Update the `$profile` variable to the AWS CLI profile you want to run Prowler with.
|
||||
|
||||
Change any parameters you would like when calling Prowler in the `Start-Job -ScriptBlock` section. Note that you need to keep the `--only-logs` parameter, otherwise an encoding issue occurs when trying to render the progress bar and Prowler won't execute successfully.
|
||||
|
||||
```powershell
|
||||
$profile = "your_profile"
|
||||
$account_id = Invoke-Expression -Command "aws sts get-caller-identity --profile $profile --query 'Account' --output text"
|
||||
|
||||
Write-Host "Executing Prowler in $account_id"
|
||||
|
||||
# Maximum number of concurrent jobs
|
||||
$MAX_PROCESSES = 5
|
||||
|
||||
# Read services from a file
|
||||
$services = Get-Content -Path "services"
|
||||
|
||||
# Array to keep track of started jobs
|
||||
$jobs = @()
|
||||
|
||||
foreach ($service in $services) {
|
||||
# Start the command as a job
|
||||
$job = Start-Job -ScriptBlock {
|
||||
prowler -p ${using:profile} -s ${using:service} -F "${using:account_id}-${using:service}" --ignore-unused-services --only-logs
|
||||
$endTimestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
|
||||
Write-Output "${endTimestamp} - $using:service has completed"
|
||||
}
|
||||
$jobs += $job
|
||||
Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - Starting job for service: $service"
|
||||
|
||||
# Check if we have reached the maximum number of jobs
|
||||
while (($jobs | Where-Object { $_.State -eq 'Running' }).Count -ge $MAX_PROCESSES) {
|
||||
Start-Sleep -Seconds 1
|
||||
# Check for any completed jobs and receive their output
|
||||
$completedJobs = $jobs | Where-Object { $_.State -eq 'Completed' }
|
||||
foreach ($completedJob in $completedJobs) {
|
||||
Receive-Job -Job $completedJob -Keep | ForEach-Object { Write-Host $_ }
|
||||
$jobs = $jobs | Where-Object { $_.Id -ne $completedJob.Id }
|
||||
Remove-Job -Job $completedJob
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Check for any remaining completed jobs
|
||||
$remainingCompletedJobs = $jobs | Where-Object { $_.State -eq 'Completed' }
|
||||
foreach ($remainingJob in $remainingCompletedJobs) {
|
||||
Receive-Job -Job $remainingJob -Keep | ForEach-Object { Write-Host $_ }
|
||||
Remove-Job -Job $remainingJob
|
||||
}
|
||||
|
||||
Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - All jobs completed"
|
||||
```
|
||||
|
||||
Output will be stored in `C:\Users\YOUR-USER\Documents\output\`
|
||||
|
||||
## Combining the output files
|
||||
|
||||
Guidance is provided for the CSV file format. From the output directory, execute either the following Bash or PowerShell script. The script will collect the output from the CSV files, only include the header from the first file, and then output the result as CombinedCSV.csv in the current working directory.
|
||||
|
||||
There is no logic implemented in terms of which CSV files it will combine. If you have additional CSV files from other actions, such as running a quick inventory, you will need to move that out of the current (or any nested) directory, or move the output you want to combine into its own folder and run the script from there.
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
|
||||
# Initialize a variable to indicate the first file
|
||||
firstFile=true
|
||||
|
||||
# Find all CSV files and loop through them
|
||||
find . -name "*.csv" -print0 | while IFS= read -r -d '' file; do
|
||||
if [ "$firstFile" = true ]; then
|
||||
# For the first file, keep the header
|
||||
cat "$file" > CombinedCSV.csv
|
||||
firstFile=false
|
||||
else
|
||||
# For subsequent files, skip the header
|
||||
tail -n +2 "$file" >> CombinedCSV.csv
|
||||
fi
|
||||
done
|
||||
```
|
||||
|
||||
```powershell
|
||||
# Get all CSV files from current directory and its subdirectories
|
||||
$csvFiles = Get-ChildItem -Recurse -Filter "*.csv"
|
||||
|
||||
# Initialize a variable to track if it's the first file
|
||||
$firstFile = $true
|
||||
|
||||
# Loop through each CSV file
|
||||
foreach ($file in $csvFiles) {
|
||||
if ($firstFile) {
|
||||
# For the first file, keep the header and change the flag
|
||||
$combinedCsv = Import-Csv -Path $file.FullName
|
||||
$firstFile = $false
|
||||
} else {
|
||||
# For subsequent files, Import-Csv already consumes the header, so just append the records
|
||||
$tempCsv = Import-Csv -Path $file.FullName
|
||||
$combinedCsv += $tempCsv
|
||||
}
|
||||
}
|
||||
|
||||
# Export the combined data to a new CSV file
|
||||
$combinedCsv | Export-Csv -Path "CombinedCSV.csv" -NoTypeInformation
|
||||
```
|
||||
|
||||
## TODO: Additional Improvements
|
||||
|
||||
Some services need to instantiate another service to perform a check. For instance, `cloudwatch` will instantiate Prowler's `iam` service to perform the `cloudwatch_cross_account_sharing_disabled` check. When the `iam` service is instantiated, it will run its `__init__` function and pull all the information required for that service. This provides an opportunity to improve the above script by grouping dependent services together so that the `iam` service (or any other cross-referenced service) isn't repeatedly instantiated. A complete mapping between these services still needs to be investigated further, but these are the cross-references that have been noted:
|
||||
|
||||
* inspector2 needs lambda and ec2
|
||||
* cloudwatch needs iam
|
||||
* dlm needs ec2
|
||||
@@ -50,6 +50,7 @@ Several checks analyse resources that are exposed to the Internet, these are:
|
||||
- sagemaker_notebook_instance_without_direct_internet_access_configured
|
||||
- sns_topics_not_publicly_accessible
|
||||
- sqs_queues_not_publicly_accessible
|
||||
- network_public_ip_shodan
|
||||
|
||||
...
|
||||
|
||||
@@ -61,8 +62,17 @@ prowler <provider> --categories internet-exposed
|
||||
|
||||
### Shodan
|
||||
|
||||
Prowler allows you check if any elastic ip in your AWS Account is exposed in Shodan with `-N`/`--shodan <shodan_api_key>` option:
|
||||
Prowler allows you to check if any public IPs in your cloud environments are exposed in Shodan with the `-N`/`--shodan <shodan_api_key>` option:
|
||||
|
||||
For example, you can check if any of your AWS EC2 instances has an elastic IP exposed in Shodan:
|
||||
```console
|
||||
prowler aws --shodan <shodan_api_key> -c ec2_elastic_ip_shodan
|
||||
prowler aws -N/--shodan <shodan_api_key> -c ec2_elastic_ip_shodan
|
||||
```
|
||||
Also, you can check if any of your Azure Subscription has an public IP exposed in shodan:
|
||||
```console
|
||||
prowler azure -N/--shodan <shodan_api_key> -c network_public_ip_shodan
|
||||
```
|
||||
And finally, you can check if any of your GCP projects has an public IP address exposed in shodan:
|
||||
```console
|
||||
prowler gcp -N/--shodan <shodan_api_key> -c compute_public_address_shodan
|
||||
```
|
||||
|
||||
@@ -1,14 +1,18 @@
# Quick Inventory

Prowler allows you to execute a quick inventory to extract the number of resources in your provider.
> Currently, it is only available for AWS provider.

???+ note
    Currently, it is only available for the AWS provider.

- You can use option `-i`/`--quick-inventory` to execute it:
```sh
prowler <provider> -i
```
> By default, it extracts resources from all the regions, you could use `-f`/`--filter-region` to specify the regions to execute the analysis.

???+ note
    By default, it extracts resources from all the regions; you can use `-f`/`--filter-region` to specify the regions to analyse.

- This feature reports the number of resources both per service and per resource type.

@@ -19,11 +19,12 @@ prowler <provider> -M csv json json-asff html -F <custom_report_name>
```console
prowler <provider> -M csv json json-asff html -o <custom_report_directory>
```
> Both flags can be used simultaneously to provide a custom directory and filename.
```console
prowler <provider> -M csv json json-asff html \
-F <custom_report_name> -o <custom_report_directory>
```
???+ note
    Both flags can be used simultaneously to provide a custom directory and filename.
    ```console
    prowler <provider> -M csv json json-asff html \
    -F <custom_report_name> -o <custom_report_directory>
    ```
## Output timestamp format
By default, the timestamp format of the output files is ISO 8601. This can be changed with the `--unix-timestamp` flag, which generates the timestamp fields in pure Unix timestamp format.
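
For instance, combining it with the format flags shown above (a usage sketch, not the only valid form):
```console
prowler <provider> -M csv json json-asff html --unix-timestamp
```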

@@ -41,50 +42,74 @@ Hereunder is the structure for each of the supported report formats by Prowler:

### HTML
![HTML Output](../img/output-html.png)

### CSV

The following are the columns present in the CSV format:
The CSV format has a set of columns common to all the providers, followed by provider-specific columns.
The common columns are the following:

- ASSESSMENT_START_TIME
- FINDING_UNIQUE_ID
- PROVIDER
- CHECK_ID
- CHECK_TITLE
- CHECK_TYPE
- STATUS
- STATUS_EXTENDED
- SERVICE_NAME
- SUBSERVICE_NAME
- SEVERITY
- RESOURCE_TYPE
- RESOURCE_DETAILS
- RESOURCE_TAGS
- DESCRIPTION
- RISK
- RELATED_URL
- REMEDIATION_RECOMMENDATION_TEXT
- REMEDIATION_RECOMMENDATION_URL
- REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC
- REMEDIATION_RECOMMENDATION_CODE_TERRAFORM
- REMEDIATION_RECOMMENDATION_CODE_CLI
- REMEDIATION_RECOMMENDATION_CODE_OTHER
- COMPLIANCE
- CATEGORIES
- DEPENDS_ON
- RELATED_TO
- NOTES

Each provider then adds the following specific columns:

#### AWS

- PROFILE
- ACCOUNT_ID
- ACCOUNT_NAME
- ACCOUNT_EMAIL
- ACCOUNT_ARN
- ACCOUNT_ORG
- ACCOUNT_TAGS
- REGION
- CHECK_ID
- CHECK_TITLE
- CHECK_TYPE
- STATUS
- STATUS_EXTENDED
- SERVICE_NAME
- SUBSERVICE_NAME
- SEVERITY
- RESOURCE_ID
- RESOURCE_ARN
- RESOURCE_TYPE
- RESOURCE_DETAILS
- RESOURCE_TAGS
- DESCRIPTION
- COMPLIANCE
- RISK
- RELATED_URL
- REMEDIATION_RECOMMENDATION_TEXT
- REMEDIATION_RECOMMENDATION_URL
- REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC
- REMEDIATION_RECOMMENDATION_CODE_TERRAFORM
- REMEDIATION_RECOMMENDATION_CODE_CLI
- REMEDIATION_RECOMMENDATION_CODE_OTHER
- CATEGORIES
- DEPENDS_ON
- RELATED_TO
- NOTES
- ACCOUNT_NAME
- ACCOUNT_EMAIL
- ACCOUNT_ARN
- ACCOUNT_ORG
- ACCOUNT_TAGS
- REGION
- RESOURCE_ID
- RESOURCE_ARN

#### AZURE

- TENANT_DOMAIN
- SUBSCRIPTION
- RESOURCE_ID
- RESOURCE_NAME

#### GCP

- PROJECT_ID
- LOCATION
- RESOURCE_ID
- RESOURCE_NAME

???+ note
    Since Prowler v3 the CSV column delimiter is the semicolon (`;`).

> Since Prowler v3 the CSV column delimiter is the semicolon (`;`)
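
As a quick illustration of that delimiter, a minimal sketch for reading the report with Python's standard `csv` module (the output file name is a placeholder):

```python
import csv

# Prowler v3 CSV reports are semicolon-delimited
with open("prowler-output.csv", newline="") as report:
    for finding in csv.DictReader(report, delimiter=";"):
        print(finding["CHECK_ID"], finding["STATUS"])
```
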
### JSON

The following code is an example output of the JSON format:
@@ -181,7 +206,8 @@ The following code is an example output of the JSON format:
}]
```

> NOTE: Each finding is a `json` object within a list. This has changed in v3 since in v2 the format used was [ndjson](http://ndjson.org/).
???+ note
    Each finding is a `json` object within a list. This changed in v3; in v2 the format used was [ndjson](http://ndjson.org/).
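
To make the list-vs-ndjson difference concrete, a short sketch in Python (file names are placeholders):

```python
import json

# Prowler v3: the whole report is a single JSON list of findings
with open("prowler-output.json") as report:
    findings_v3 = json.load(report)

# Prowler v2 (ndjson): one JSON object per line
with open("prowler-v2-output.ndjson") as report:
    findings_v2 = [json.loads(line) for line in report if line.strip()]
```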

### JSON-OCSF
@@ -442,7 +468,9 @@ Based on [Open Cybersecurity Schema Framework Security Finding v1.0.0-rc.3](http
}]
```

> NOTE: Each finding is a `json` object.
???+ note
    Each finding is a `json` object.

### JSON-ASFF

The following code is an example output of the [JSON-ASFF](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html) format:
@@ -575,4 +603,5 @@ The following code is an example output of the [JSON-ASFF](https://docs.aws.amaz
}]
```

> NOTE: Each finding is a `json` object within a list.
???+ note
    Each finding is a `json` object within a list.

32
mkdocs.yml
@@ -1,15 +1,15 @@
# Project information
site_name: Prowler Documentation
site_url: https://docs.prowler.pro/
site_name: Prowler Open Source Documentation
site_url: https://docs.prowler.com/
site_description: >-
  Prowler Documentation Site
  Prowler Open Source Documentation

# Theme Configuration
theme:
  language: en
  logo: img/prowler-logo.png
  logo: img/prowler-logo-white.png
  name: material
  favicon: img/prowler-icon.svg
  favicon: favicon.ico
  features:
    - navigation.tabs
    - navigation.tabs.sticky
@@ -19,6 +19,11 @@ theme:
    primary: black
    accent: green

plugins:
  - search
  - git-revision-date-localized:
      enable_creation_date: true

edit_uri: "https://github.com/prowler-cloud/prowler/tree/master/docs"
# Prowler OSS Repository
repo_url: https://github.com/prowler-cloud/prowler/
@@ -38,8 +43,10 @@ nav:
    - Logging: tutorials/logging.md
    - Allowlist: tutorials/allowlist.md
    - Check Aliases: tutorials/check-aliases.md
    - Custom Metadata: tutorials/custom-checks-metadata.md
    - Ignore Unused Services: tutorials/ignore-unused-services.md
    - Pentesting: tutorials/pentesting.md
    - Parallel Execution: tutorials/parallel-execution.md
    - Developer Guide: developer-guide/introduction.md
    - AWS:
        - Authentication: tutorials/aws/authentication.md
@@ -56,6 +63,7 @@ nav:
        - Boto3 Configuration: tutorials/aws/boto3-configuration.md
    - Azure:
        - Authentication: tutorials/azure/authentication.md
        - Non default clouds: tutorials/azure/use-non-default-cloud.md
        - Subscriptions: tutorials/azure/subscriptions.md
    - Google Cloud:
        - Authentication: tutorials/gcp/authentication.md
@@ -71,11 +79,13 @@ nav:
    - Testing:
        - Unit Tests: developer-guide/unit-testing.md
        - Integration Tests: developer-guide/integration-testing.md
        - Debugging: developer-guide/debugging.md
  - Security: security.md
  - Contact Us: contact.md
  - Troubleshooting: troubleshooting.md
  - About: about.md
  - ProwlerPro: https://prowler.pro
  - Prowler SaaS: https://prowler.com

# Customization
extra:
  consent:
@@ -99,11 +109,15 @@ extra:
      link: https://twitter.com/prowlercloud

# Copyright
copyright: Copyright © 2022 Toni de la Fuente, Maintained by the Prowler Team at Verica, Inc.</a>
copyright: >
  Copyright © <script>document.write(new Date().getFullYear())</script> Toni de la Fuente, Maintained by the Prowler Team at ProwlerPro, Inc.</a>
  </br><a href="#__consent">Change cookie settings</a>

markdown_extensions:
  - abbr
  - admonition
  - pymdownx.details
  - pymdownx.superfences
  - attr_list
  - def_list
  - footnotes
@@ -117,8 +131,8 @@ markdown_extensions:
  - pymdownx.caret
  - pymdownx.details
  - pymdownx.emoji:
      emoji_generator: !!python/name:materialx.emoji.to_svg
      emoji_index: !!python/name:materialx.emoji.twemoji
      emoji_index: !!python/name:material.extensions.emoji.twemoji
      emoji_generator: !!python/name:material.extensions.emoji.to_svg
  - pymdownx.highlight:
      anchor_linenums: true
  - pymdownx.inlinehilite

3759
poetry.lock
generated
@@ -26,6 +26,10 @@ from prowler.lib.check.check import (
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
from prowler.lib.check.custom_checks_metadata import (
    parse_custom_checks_metadata_file,
    update_checks_metadata,
)
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.compliance import display_compliance_table
@@ -67,6 +71,7 @@ def prowler():
    checks_folder = args.checks_folder
    severities = args.severity
    compliance_framework = args.compliance
    custom_checks_metadata_file = args.custom_checks_metadata_file

    if not args.no_banner:
        print_banner(args)
@@ -96,9 +101,19 @@ def prowler():

    bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
    # Complete checks metadata with the compliance framework specification
    update_checks_metadata_with_compliance(
    bulk_checks_metadata = update_checks_metadata_with_compliance(
        bulk_compliance_frameworks, bulk_checks_metadata
    )
    # Update checks metadata if the --custom-checks-metadata-file is present
    custom_checks_metadata = None
    if custom_checks_metadata_file:
        custom_checks_metadata = parse_custom_checks_metadata_file(
            provider, custom_checks_metadata_file
        )
        bulk_checks_metadata = update_checks_metadata(
            bulk_checks_metadata, custom_checks_metadata
        )

    if args.list_compliance:
        print_compliance_frameworks(bulk_compliance_frameworks)
        sys.exit()
@@ -174,7 +189,11 @@ def prowler():
    findings = []
    if len(checks_to_execute):
        findings = execute_checks(
            checks_to_execute, provider, audit_info, audit_output_options
            checks_to_execute,
            provider,
            audit_info,
            audit_output_options,
            custom_checks_metadata,
        )
    else:
        logger.error(
@@ -246,7 +265,10 @@ def prowler():
        for region in security_hub_regions:
            # Save the regions where AWS Security Hub is enabled
            if verify_security_hub_integration_enabled_per_region(
                region, audit_info.audit_session
                audit_info.audited_partition,
                region,
                audit_info.audit_session,
                audit_info.audited_account,
            ):
                aws_security_enabled_regions.append(region)

1190
prowler/compliance/aws/aws_account_security_onboarding_aws.json
Normal file
@@ -468,27 +468,6 @@
    },
    {
      "Id": "2.1.1",
      "Description": "Ensure all S3 buckets employ encryption-at-rest",
      "Checks": [
        "s3_bucket_default_encryption"
      ],
      "Attributes": [
        {
          "Section": "2.1. Simple Storage Service (S3)",
          "Profile": "Level 2",
          "AssessmentStatus": "Automated",
          "Description": "Amazon S3 provides a variety of no, or low, cost encryption options to protect data at rest.",
          "RationaleStatement": "Encrypting data at rest reduces the likelihood that it is unintentionally exposed and can nullify the impact of disclosure if the encryption remains unbroken.",
          "ImpactStatement": "Amazon S3 buckets with default bucket encryption using SSE-KMS cannot be used as destination buckets for Amazon S3 server access logging. Only SSE-S3 default encryption is supported for server access log destination buckets.",
          "RemediationProcedure": "**From Console:** 1. Login to AWS Management Console and open the Amazon S3 console using https://console.aws.amazon.com/s3/ 2. Select a Bucket. 3. Click on 'Properties'. 4. Click edit on `Default Encryption`. 5. Select either `AES-256`, `AWS-KMS`, `SSE-KMS` or `SSE-S3`. 6. Click `Save` 7. Repeat for all the buckets in your AWS account lacking encryption. **From Command Line:** Run either ``` aws s3api put-bucket-encryption --bucket <bucket name> --server-side-encryption-configuration '{\"Rules\": [{\"ApplyServerSideEncryptionByDefault\": {\"SSEAlgorithm\": \"AES256\"}}]}' ``` or ``` aws s3api put-bucket-encryption --bucket <bucket name> --server-side-encryption-configuration '{\"Rules\": [{\"ApplyServerSideEncryptionByDefault\": {\"SSEAlgorithm\": \"aws:kms\",\"KMSMasterKeyID\": \"aws/s3\"}}]}' ``` **Note:** the KMSMasterKeyID can be set to the master key of your choosing; aws/s3 is an AWS preconfigured default.",
          "AuditProcedure": "**From Console:** 1. Login to AWS Management Console and open the Amazon S3 console using https://console.aws.amazon.com/s3/ 2. Select a Bucket. 3. Click on 'Properties'. 4. Verify that `Default Encryption` is enabled, and displays either `AES-256`, `AWS-KMS`, `SSE-KMS` or `SSE-S3`. 5. Repeat for all the buckets in your AWS account. **From Command Line:** 1. Run command to list buckets ``` aws s3 ls ``` 2. For each bucket, run ``` aws s3api get-bucket-encryption --bucket <bucket name> ``` 3. Verify that either ``` \"SSEAlgorithm\": \"AES256\" ``` or ``` \"SSEAlgorithm\": \"aws:kms\"``` is displayed.",
          "AdditionalInformation": "S3 bucket encryption only applies to objects as they are placed in the bucket. Enabling S3 bucket encryption does **not** encrypt objects previously stored within the bucket.",
          "References": "https://docs.aws.amazon.com/AmazonS3/latest/user-guide/default-bucket-encryption.html:https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html#bucket-encryption-related-resources"
        }
      ]
    },
    {
      "Id": "2.1.2",
      "Description": "Ensure S3 Bucket Policy is set to deny HTTP requests",
      "Checks": [
        "s3_bucket_secure_transport_policy"
@@ -509,7 +488,7 @@
      ]
    },
    {
      "Id": "2.1.3",
      "Id": "2.1.2",
      "Description": "Ensure MFA Delete is enabled on S3 buckets",
      "Checks": [
        "s3_bucket_no_mfa_delete"
@@ -530,7 +509,7 @@
      ]
    },
    {
      "Id": "2.1.4",
      "Id": "2.1.3",
      "Description": "Ensure all data in Amazon S3 has been discovered, classified and secured when required.",
      "Checks": [
        "macie_is_enabled"
@@ -551,7 +530,7 @@
      ]
    },
    {
      "Id": "2.1.5",
      "Id": "2.1.4",
      "Description": "Ensure that S3 Buckets are configured with 'Block public access (bucket settings)'",
      "Checks": [
        "s3_bucket_level_public_access_block",

1317
prowler/compliance/aws/cis_3.0_aws.json
Normal file
@@ -211,6 +211,31 @@
        "iam_avoid_root_usage"
      ]
    },
    {
      "Id": "op.acc.4.aws.iam.8",
      "Description": "Proceso de gestión de derechos de acceso",
      "Attributes": [
        {
          "IdGrupoControl": "op.acc.4",
          "Marco": "operacional",
          "Categoria": "control de acceso",
          "DescripcionControl": "Se restringirá todo acceso a las acciones especificadas para el usuario root de una cuenta.",
          "Nivel": "alto",
          "Tipo": "requisito",
          "Dimensiones": [
            "confidencialidad",
            "integridad",
            "trazabilidad",
            "autenticidad"
          ],
          "ModoEjecucion": "automático"
        }
      ],
      "Checks": [
        "organizations_account_part_of_organizations",
        "organizations_scp_check_deny_regions"
      ]
    },
    {
      "Id": "op.acc.4.aws.iam.9",
      "Description": "Proceso de gestión de derechos de acceso",
@@ -789,7 +814,8 @@
        }
      ],
      "Checks": [
        "inspector2_findings_exist"
        "inspector2_is_enabled",
        "inspector2_active_findings_exist"
      ]
    },
    {
@@ -1121,6 +1147,30 @@
        "cloudtrail_insights_exist"
      ]
    },
    {
      "Id": "op.exp.8.r1.aws.ct.3",
      "Description": "Revisión de los registros",
      "Attributes": [
        {
          "IdGrupoControl": "op.exp.8.r1",
          "Marco": "operacional",
          "Categoria": "explotación",
          "DescripcionControl": "Registrar los eventos de lectura y escritura de datos.",
          "Nivel": "alto",
          "Tipo": "refuerzo",
          "Dimensiones": [
            "trazabilidad"
          ],
          "ModoEjecucion": "automático"
        }
      ],
      "Checks": [
        "cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
        "cloudtrail_s3_dataevents_write_enabled",
        "cloudtrail_s3_dataevents_read_enabled",
        "cloudtrail_insights_exist"
      ]
    },
    {
      "Id": "op.exp.8.r1.aws.ct.4",
      "Description": "Revisión de los registros",
@@ -1233,6 +1283,33 @@
        "iam_role_cross_service_confused_deputy_prevention"
      ]
    },
    {
      "Id": "op.exp.8.r4.aws.ct.1",
      "Description": "Control de acceso",
      "Attributes": [
        {
          "IdGrupoControl": "op.exp.8.r4",
          "Marco": "operacional",
          "Categoria": "explotación",
          "DescripcionControl": "Asignar correctamente las políticas AWS IAM para el acceso y borrado de los registros y sus copias de seguridad haciendo uso del principio de mínimo privilegio.",
          "Nivel": "alto",
          "Tipo": "refuerzo",
          "Dimensiones": [
            "trazabilidad"
          ],
          "ModoEjecucion": "automático"
        }
      ],
      "Checks": [
        "iam_policy_allows_privilege_escalation",
        "iam_customer_attached_policy_no_administrative_privileges",
        "iam_customer_unattached_policy_no_administrative_privilege",
        "iam_no_custom_policy_permissive_role_assumption",
        "iam_policy_attached_only_to_group_or_roles",
        "iam_role_cross_service_confused_deputy_prevention",
        "iam_policy_no_full_access_to_cloudtrail"
      ]
    },
    {
      "Id": "op.exp.8.r4.aws.ct.2",
      "Description": "Control de acceso",
@@ -1859,7 +1936,8 @@
        }
      ],
      "Checks": [
        "inspector2_findings_exist"
        "inspector2_is_enabled",
        "inspector2_active_findings_exist"
      ]
    },
    {
@@ -1934,7 +2012,8 @@
        }
      ],
      "Checks": [
        "inspector2_findings_exist"
        "inspector2_is_enabled",
        "inspector2_active_findings_exist"
      ]
    },
    {
@@ -2110,7 +2189,7 @@
        }
      ],
      "Checks": [
        "networkfirewall_in_all_vpc"
        "fms_policy_compliant"
      ]
    },
    {
@@ -2251,6 +2330,31 @@
        "cloudfront_distributions_https_enabled"
      ]
    },
    {
      "Id": "mp.com.4.aws.ws.1",
      "Description": "Separación de flujos de información en la red",
      "Attributes": [
        {
          "IdGrupoControl": "mp.com.4",
          "Marco": "medidas de protección",
          "Categoria": "segregación de redes",
          "DescripcionControl": "Se deberán abrir solo los puertos necesarios para el uso del servicio AWS WorkSpaces.",
          "Nivel": "alto",
          "Tipo": "requisito",
          "Dimensiones": [
            "confidencialidad",
            "integridad",
            "trazabilidad",
            "autenticidad",
            "disponibilidad"
          ],
          "ModoEjecucion": "automático"
        }
      ],
      "Checks": [
        "workspaces_vpc_2private_1public_subnets_nat"
      ]
    },
    {
      "Id": "mp.com.4.aws.vpc.1",
      "Description": "Separación de flujos de información en la red",
@@ -2323,7 +2427,8 @@
        }
      ],
      "Checks": [
        "vpc_subnet_separate_private_public"
        "vpc_subnet_separate_private_public",
        "vpc_different_regions"
      ]
    },
    {
@@ -2370,7 +2475,8 @@
        }
      ],
      "Checks": [
        "vpc_subnet_different_az"
        "vpc_subnet_different_az",
        "vpc_different_regions"
      ]
    },
    {

693
prowler/compliance/aws/foundational_technical_review_aws.json
Normal file
@@ -0,0 +1,693 @@
{
  "Framework": "Foundational-Technical-Review",
  "Version": "",
  "Provider": "AWS",
  "Description": "The Foundational Technical Review (FTR) assesses an AWS Partner's solution against a specific set of Amazon Web Services (AWS) best practices around security, performance, and operational processes that are most critical for customer success. Passing the FTR is required to qualify AWS Software Partners for AWS Partner Network (APN) programs such as AWS Competency and AWS Service Ready but any AWS Partner who offers a technology solution may request a FTR review through AWS Partner Central.",
  "Requirements": [
    {
      "Id": "HOST-001",
      "Name": "Confirm your hosting model",
      "Description": "To use this FTR checklist you must host all critical application components on AWS. You may use external providers for edge services such as content delivery networks (CDNs) or domain name system (DNS), or corporate identity providers. If you are using any edge services outside AWS, please specify them in the self-assessment.",
      "Attributes": [
        {
          "Section": "Partner-hosted FTR requirements",
          "Subsection": "Hosting",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "SUP-001",
      "Name": "Subscribe to the AWS Business Support tier (or higher) for all production AWS accounts or have an action plan to handle issues which require help from AWS Support",
      "Description": "It is recommended that you subscribe to the AWS Business Support tier or higher (including AWS Partner-Led Support) for all of your AWS production accounts. For more information, refer to Compare AWS Support Plans. If you don't have premium support, you must have an action plan to handle issues which require help from AWS Support. AWS Support provides a mix of tools and technology, people, and programs designed to proactively help you optimize performance, lower costs, and innovate faster. AWS Business Support provides additional benefits including access to AWS Trusted Advisor and AWS Personal Health Dashboard and faster response times.",
      "Attributes": [
        {
          "Section": "Partner-hosted FTR requirements",
          "Subsection": "Support level",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "WAFR-001",
      "Name": "Conduct periodic architecture reviews (minimum once every year)",
      "Description": "Conduct periodic architecture reviews of your production workload (at least once per year) using a documented architectural standard that includes AWS-specific best practices. If you have an internally defined standard for your AWS workloads, we recommend you use it for these reviews. If you do not have an internal standard, we recommend you use the AWS Well-Architected Framework.",
      "Attributes": [
        {
          "Section": "Partner-hosted FTR requirements",
          "Subsection": "Architecture review",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "WAFR-002",
      "Name": "Review the AWS Shared Responsibility Models for Security and Resiliency",
      "Description": "Review the AWS Shared Responsibility Model for Security and the AWS Shared Responsibility Model for Resiliency. Ensure that your product’s architecture and operational processes address the customer responsibilities defined in these models. We recommend you to use AWS Resilience Hub to ensure your workload resiliency posture meets your targets and to provide you with operational procedures you may use to address the customer responsibilities.",
      "Attributes": [
        {
          "Section": "Partner-hosted FTR requirements",
          "Subsection": "Architecture review",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "ARC-001",
      "Name": "Use root user only by exception",
      "Description": "The root user has unlimited access to your account and its resources, and using it only by exception helps protect your AWS resources. The AWS root user must not be used for everyday tasks, even administrative ones. Instead, adhere to the best practice of using the root user only to create your first AWS Identity and Access Management (IAM) user. Then securely lock away the root user credentials and use them to perform only a few account and service management tasks. To view the tasks that require you to sign in as the root user, see AWS Tasks That Require Root User. FTR does not require you to actively monitor root usage.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "AWS root account",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "ARC-003",
      "Name": "Enable multi-factor authentication (MFA) on the root user for all AWS accounts",
      "Description": "Enabling MFA provides an additional layer of protection against unauthorized access to your account. To configure MFA for the root user, follow the instructions for enabling either a virtual MFA or hardware MFA device. If you are using AWS Organizations to create new accounts, the initial password for the root user is set to a random value that is never exposed to you. If you do not recover the password for the root user of these accounts, you do not need to enable MFA on them. For any accounts where you do have access to the root user’s password, you must enable MFA",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "AWS root account",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "iam_root_mfa_enabled",
        "iam_root_hardware_mfa_enabled"
      ]
    },
    {
      "Id": "ARC-004",
      "Name": "Remove access keys for the root user",
"Description": "Programmatic access to AWS APIs should never use the root user. It is best not to generate static an access key for the root user. If one already exists, you should transition any processes using that key to use temporary access keys from an AWS Identity and Access Management (IAM) role, or, if necessary, static access keys from an IAM user.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "AWS root account",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "iam_no_root_access_key"
      ]
    },
    {
      "Id": "ARC-005",
      "Name": "Develop incident management plans",
"Description": "An incident management plan is critical to respond, mitigate, and recover from the potential impact of security incidents. An incident management plan is a structured process for identifying, remediating, and responding in a timely matter to security incidents. An effective incident management plan must be continually iterated upon, remaining current with your cloud operations goal. For more information on developing incident management plan please see Develop incident management plans.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "AWS root account",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "ACOM-001",
      "Name": "Configure AWS account contacts",
      "Description": "If an account is not managed by AWS Organizations, alternate account contacts help AWS get in contact with the appropriate personnel if needed. Configure the account’s alternate contacts to point to a group rather than an individual. For example, create separate email distribution lists for billing, operations, and security and configure these as Billing, Security, and Operations contacts in each active AWS account. This ensures that multiple people will receive AWS notifications and be able to respond, even if someone is on vacation, changes roles, or leaves the company.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Communications from AWS",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "ACOM-002",
      "Name": "Set account contact information including the root user email address to email addresses and phone numbers owned by your company",
      "Description": "Using company owned email addresses and phone numbers for contact information enables you to access them even if the individuals whom they belong to are no longer with your organization",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Communications from AWS",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-001",
      "Name": "Enable multi-factor authentication (MFA) for all Human Identities with AWS access",
      "Description": "You must require any human identities to authenticate using MFA before accessing your AWS accounts. Typically, this means enabling MFA within your corporate identity provider. If you have existing legacy IAM users you must enable MFA for console access for those principals as well. Enabling MFA for IAM users provides an additional layer of security. With MFA, users have a device that generates a unique authentication code (a one-time password, or OTP). Users must provide both their normal credentials (user name and password) and the OTP. The MFA device can either be a special piece of hardware, or it can be a virtual device (for example, it can run in an app on a smartphone). Please note that machine identities do not require MFA.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "iam_root_mfa_enabled",
        "iam_root_hardware_mfa_enabled",
        "iam_user_hardware_mfa_enabled",
        "iam_user_mfa_enabled_console_access",
        "iam_administrator_access_with_mfa"
      ]
    },
    {
      "Id": "IAM-002",
      "Name": "Monitor and secure static AWS Identity and Access Management (IAM) credentials",
      "Description": "Use temporary IAM credentials retrieved by assuming a role whenever possible. In cases where it is infeasible to use IAM roles, implement the following controls to reduce the risk these credentials are misused: Rotate IAM access keys regularly (recommended at least every 90 days). Maintain an inventory of all static keys and where they are used and remove unused access keys. Implement monitoring of AWS CloudTrail logs to detect anomalous activity or other potential misuse (e.g. using AWS GuardDuty.) Define a runbook or SOP for revoking credentials in the event you detect misuse.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "iam_rotate_access_key_90_days",
        "iam_user_accesskey_unused",
        "iam_user_with_temporary_credentials",
        "guardduty_is_enabled",
        "guardduty_no_high_severity_findings"
      ]
    },
    {
      "Id": "IAM-003",
      "Name": "Use strong password policy",
      "Description": "Enforce a strong password policy, and educate users to avoid common or re-used passwords. For IAM users, you can create a password policy for your account on the Account Settings page of the IAM console. You can use the password policy to define password requirements, such as minimum length and whether it requires non-alphabetic characters, and so on. For more information, see Setting an Account Password Policy for IAM users.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "iam_password_policy_expires_passwords_within_90_days_or_less",
        "iam_password_policy_lowercase",
        "iam_password_policy_minimum_length_14",
        "iam_password_policy_number",
        "iam_password_policy_reuse_24",
        "iam_password_policy_symbol",
        "iam_password_policy_uppercase"
      ]
    },
    {
      "Id": "IAM-004",
      "Name": "Create individual identities (no shared credentials) for anyone who needs AWS access",
      "Description": "Create individual entities and give unique security credentials and permissions to each user accessing your account. With individual entities and no shared credentials, you can audit the activity of each user.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-005",
      "Name": "Use IAM roles and their temporary security credentials to provide access to third parties.",
      "Description": "Do not provision IAM users and share those credentials with people outside of your organization. Any external services that need to make AWS API calls against your account (for example, a monitoring solution that accesses your account's AWS CloudWatch metrics) must use a cross-account role. For more information, refer to Providing access to AWS accounts owned by third parties.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-006",
      "Name": "Grant least privilege access",
      "Description": "You must follow the standard security advice of granting least privilege. Grant only the access that identities require by allowing access to specific actions on specific AWS resources under specific conditions. Rely on groups and identity attributes to dynamically set permissions at scale, rather than defining permissions for individual users. For example, you can allow a group of developers access to manage only resources for their project. This way, when a developer is removed from the group, access for the developer is revoked everywhere that group was used for access control, without requiring any changes to the access policies.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "iam_policy_attached_only_to_group_or_roles"
      ]
    },
    {
      "Id": "IAM-007",
      "Name": "Manage access based on life cycle",
      "Description": "Integrate access controls with operator and application lifecycle and your centralized federation provider and IAM. For example, remove a user’s access when they leave the organization or change roles.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-008",
      "Name": "Audit identities quarterly",
"Description": "Auditing the identities that are configured in your identity provider and IAM helps ensure that only authorized identities have access to your workload. For example, remove people that leave the organization, and remove cross-account roles that are no longer required. Have a process in place to periodically audit permissions to the services accessed by an IAM entity. This helps you identify the policies you needto modify to remove any unused permissions. For more information, see Refining permissions in AWS using last accessed information.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-009",
      "Name": "Do not embed credentials in application code",
      "Description": "Ensure that all credentials used by your applications (for example, IAM access keys and database passwords) are never included in your application's source code or committed to source control in any way.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-0010",
      "Name": "Store secrets securely.",
      "Description": "Encrypt all secrets in transit and at rest, define fine-grained access controls that only allow access to specific identities, and log access to secrets in an audit log. We recommend you use a purpose-built secret management service such as AWS Secrets Manager, AWS Systems Manager Parameter Store, or an AWS Partner solution, but internally developed solutions that meet these requirements are also acceptable.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-0011",
      "Name": "Encrypt all end user/customer credentials and hash passwords at rest.",
      "Description": "If you are storing end user/customer credentials in a database that you manage, encrypt credentials at rest and hash passwords. As an alternative, AWS recommends using a user-identity synchronization service, such as Amazon Cognito or an equivalent AWS Partner solution.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "IAM-0012",
      "Name": "Use temporary credentials",
      "Description": "Use temporary security credentials to access AWS resources. For machine identities within AWS (for example, Amazon Elastic Compute Cloud (Amazon EC2) instances or AWS Lambda functions), always use IAM roles to acquire temporary security credentials. For machine identities running outside of AWS, use IAM Roles Anywhere or securely store static AWS access keys that are only used to assume an IAM role.For human identities, use AWS IAM Identity Center or other identity federation solutions where possible. If you must use static AWS access keys for human users, require MFA for all access, including the AWS Management Console, and AWS Command Line Interface (AWS CLI).",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Identity and Access Management",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "iam_rotate_access_key_90_days",
        "iam_user_accesskey_unused",
        "iam_user_with_temporary_credentials",
        "iam_policy_attached_only_to_group_or_roles",
        "iam_role_administratoraccess_policy",
        "iam_role_cross_account_readonlyaccess_policy",
        "iam_role_cross_service_confused_deputy_prevention",
        "iam_root_hardware_mfa_enabled",
        "iam_root_mfa_enabled",
        "iam_root_hardware_mfa_enabled",
        "iam_user_hardware_mfa_enabled",
        "iam_user_mfa_enabled_console_access",
        "iam_administrator_access_with_mfa"
      ]
    },
    {
      "Id": "SECOPS-001",
      "Name": "Perform vulnerability management",
      "Description": "Define a mechanism and frequency to scan and patch for vulnerabilities in your dependencies, and in your operating systems to help protect against new threats. Scan and patch your dependencies, and your operating systems on a defined schedule. Software vulnerability management is essential to keeping your system secure from threat actors. Embedding vulnerability assessments early into your continuous integration/continuous delivery (CI/CD) pipeline allows you to prioritize remediation of any security vulnerabilities detected. The solution you need to achieve this varies according to the AWS services that you are consuming. To check for vulnerabilities in software running in Amazon EC2 instances, you can add Amazon Inspector to your pipeline to cause your build to fail if Inspector detects vulnerabilities. You can also use open source products such as OWASP Dependency-Check, Snyk, OpenVAS, package managers and AWS Partner tools for vulnerability management.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Operational security",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "inspector2_is_enabled",
        "inspector2_active_findings_exist",
        "accessanalyzer_enabled_without_findings",
        "guardduty_no_high_severity_findings"
      ]
    },
    {
      "Id": "NETSEC-001",
      "Name": "Implement the least permissive rules for all Amazon EC2 security groups",
      "Description": "All Amazon EC2 security groups should restrict access to the greatest degree possible. At a minimum, do the following: Ensure that no security groups allow ingress from 0.0.0.0/0 to port 22 or 3389 (CIS 5.2) Ensure that the default security group of every VPC restricts all traffic (CIS 5.3/Security Control EC2.2)",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Network Security",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "ec2_ami_public",
        "ec2_instance_public_ip",
        "ec2_securitygroup_allow_ingress_from_internet_to_any_port",
        "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434",
        "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23",
        "ec2_securitygroup_allow_wide_open_public_ipv4",
        "ec2_securitygroup_default_restrict_traffic",
        "ec2_securitygroup_not_used",
        "ec2_securitygroup_with_many_ingress_egress_rules"
      ]
    },
    {
      "Id": "NETSEC-002",
      "Name": "Restrict resources in public subnets",
      "Description": "Do not place resources in public subnets of your VPC unless they must receive network traffic from public sources. Public subnets are subnets associated with a route table that has a route to an internet gateway.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Network Security",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "vpc_subnet_no_public_ip_by_default",
        "vpc_subnet_separate_private_public",
        "vpc_endpoint_connections_trust_boundaries",
        "vpc_endpoint_services_allowed_principals_trust_boundaries",
        "workspaces_vpc_2private_1public_subnets_nat"
      ]
    },
    {
      "Id": "BAR-001",
      "Name": "Configure automatic data backups",
"Description": "You must perform regular backups to a durable storage service. Backups ensure that you have the ability to recover from administrative, logical, or physical error scenarios. Configure backups to be taken automatically based on a periodic schedule, or by changes in the dataset. RDS instances, EBS volumes, DynamoDB tables, and S3 objects can all be configured for automatic backup. AWS Backup, AWS Marketplace solutions or third-party solutions can also be used. If objects in S3 bucket are write-once-read-many (WORM), compensating controls such as object lock can be used meet this requirement. If it is customers’ responsibility to backup their data, it must be clearly stated in the documentation and the Partner must provide clear instructions on how to backup the data.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Backups and recovery",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "backup_plans_exist",
        "backup_reportplans_exist",
        "backup_vaults_encrypted",
        "backup_vaults_exist",
        "efs_have_backup_enabled",
        "rds_instance_backup_enabled"
      ]
    },
    {
      "Id": "BAR-002",
      "Name": "Periodically recover data to verify the integrity of your backup process",
      "Description": "To confirm that your backup process meets your recovery time objectives (RTO) and recovery point objectives (RPO), run a recovery test on a regular schedule and after making significant changes to your cloud environment. For more information, refer to Getting Started - Backup and Restore with AWS.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Backups and recovery",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "RES-001",
      "Name": "Define a Recovery Point Objective (RPO)",
      "Description": "To confirm that your backup process meets your recovery time objectives (RTO) and recovery point objectives (RPO), run a recovery test on a regular schedule and after making significant changes to your cloud environment. For more information, refer to Getting Started - Backup and Restore with AWS.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Resiliency",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "RES-002",
      "Name": "Establish a Recovery Time Objective (RTO)",
      "Description": "Define an RTO that meets your organization’s needs and expectations. RTO is the maximum acceptable delay your organization will accept between the interruption and restoration of service.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Resiliency",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "RES-004",
      "Name": "Resiliency Testing",
      "Description": "Test resiliency to ensure that RTO and RPO are met, both periodically (minimum every 12 months) and after major updates. The resiliency test must include accidental data loss, instance failures, and Availability Zone (AZ) failures. At least one resilience test that meets RTO and RPO requirements must be completed prior to FTR approval. You can use AWS Resilience Hub to test and verify your workloads to see if it meets its resilience target. AWS Resilience Hub works with AWS Fault Injection Service (AWS FIS) , a chaos engineering service, to provide fault-injection simulations of real-world failures to validate the application recovers within the resilience targets you defined. AWS Resilience Hub also provides API operations for you to integrate its resilience assessment and testing into your CI/CD pipelines for ongoing resilience validation. Including resilience validation in CI/CD pipelines helps make sure that changes to the workload’s underlying infrastructure don't compromise resilience.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Resiliency",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "RES-005",
      "Name": "Communicate customer responsibilities for resilience",
      "Description": "Clearly define your customers’ responsibility for backup, recovery, and availability. At a minimum, your product documentation or customer agreements should cover the following: Responsibility the customer has for backing up the data stored in your solution. Instructions for backing up data or configuring optional features in your product for data protection, if applicable. Options customers have for configuring the availability of your product.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Resiliency",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "RES-006",
      "Name": "Architect your product to meet availability targets and uptime service level agreements (SLAs)",
      "Description": "If you publish or privately agree to availability targets or uptime SLAs, ensure that your architecture and operational processes are designed to support them. Additionally, provide clear guidance to customers on any configuration required to achieve the targets or SLAs.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Resiliency",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "RES-007",
      "Name": "Define a customer communication plan for outages",
      "Description": "Establish a plan for communicating information about system outages to your customers both during and after incidents. Your communication should not include any data that was provided by AWS under a non-disclosure agreement (NDA).",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Resiliency",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "S3-001",
      "Name": "Review all Amazon S3 buckets to determine appropriate access levels",
      "Description": "You must ensure that buckets that require public access have been reviewed to determine if public read or write access is needed and if appropriate controls are in place to control public access. When assigning access permissions, follow the principle of least privilege, an AWS best practice. For more information, refer to overview of managing access.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Amazon S3 bucket access",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "s3_bucket_acl_prohibited",
        "s3_bucket_default_encryption",
        "s3_bucket_kms_encryption",
        "s3_bucket_level_public_access_block",
        "s3_bucket_object_lock",
        "s3_bucket_policy_public_write_access",
        "s3_bucket_public_access",
        "s3_bucket_public_list_acl",
        "s3_bucket_public_write_acl",
        "s3_bucket_secure_transport_policy",
        "s3_bucket_server_access_logging_enabled"
      ]
    },
    {
      "Id": "CAA-001",
      "Name": "Use cross-account roles to access customer AWS accounts",
      "Description": "Cross-account roles reduce the amount of sensitive information AWS Partners need to store for their customers.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Cross-account access",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "CAA-007",
      "Name": "Provide guidance or an automated setup mechanism (for example, an AWS CloudFormation template) for creating cross-account roles with the minimum required privileges",
      "Description": "The policy created for cross-account access in customer accounts must follow the principle of least privilege. The AWS Partner must provide a role-policy document or an automated setup mechanism (for example, an AWS CloudFormation template) for the customers to use to ensure that the roles are created with minimum required privileges. For more information, refer to the AWS Partner Network (APN) blog posts.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Cross-account access",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "CAA-002",
      "Name": "Use an external ID with cross-account roles to access customer accounts",
      "Description": "An external ID allows the user that is assuming the role to assert the circumstances in which they are operating. It also provides a way for the account owner to permit the role to be assumed only under specific circumstances. The primary function of the external ID is to address and prevent the confused deputy problem.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Cross-account access",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "CAA-004",
      "Name": "Use a value you generate (not something provided by the customer) for the external ID",
      "Description": "When configuring cross-account access using IAM roles, you must use a value you generate for the external ID, instead of one provided by the customer, to ensure the integrity of the cross-account role configuration. A partner-generated external ID ensures that malicious parties cannot impersonate a customer's configuration and enforces uniqueness and format consistency across all customers. If you are not generating an external ID today we recommend implementing a process that generates a random unique value (such as a Universally Unique Identifier) for the external ID that a customer uses to set up a cross-account role.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Cross-account access",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "CAA-005",
      "Name": "Ensure that all external IDs are unique.",
      "Description": "The external IDs used must be unique across all customers. Re-using external IDs for different customers does not solve the confused deputy problem and runs the risk of customer A being able to view data of customer B by using the role ARN and the external ID of customer B. To resolve this, we recommend implementing a process that ensures a random unique value, such as a Universally Unique Identifier, is generated for the external ID that a customer would use to setup a cross account role.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Cross-account access",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "CAA-006",
      "Name": "Provide read-only access to external ID to customers",
      "Description": "Customers must not be able to set or influence external IDs. When the external ID is editable, it is possible for one customer to impersonate the configuration of another. For example, when the external ID is editable, customer A can create a cross account role setup using customer B’s role ARN and external ID, granting customer A access to customer B’s data. Remediation of this item involves making the external ID a view-only field, ensuring that the external ID cannot be changed to impersonate the setup of another customer.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Cross-account access",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "CAA-003",
      "Name": "Deprecate any historical use of customer-provided IAM credentials",
      "Description": "If your application provides legacy support for the use of static IAM credentials for cross-account access, the application's user interface and customer documentation must make it clear that this method is deprecated. Existing customers should be encouraged to switch to cross-account role based-access, and collection of credentials should be disabled for new customers.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Cross-account access",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "SDAT-001",
      "Name": "Identify sensitive data (for example, Personally Identifiable Information (PII) and Protected Health Information (PHI))",
      "Description": "Data classification enables you to determine which data needs to be protected and how. Based on the workload and the data it processes, identify the data that is not common public knowledge.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Sensitive data",
          "Type": "Manual"
        }
      ],
      "Checks": []
    },
    {
      "Id": "SDAT-002",
      "Name": "Encrypt all sensitive data at rest",
      "Description": "Encryption maintains the confidentiality of sensitive data even when it gets stolen or the network through which it is transmitted becomes compromised.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
          "Subsection": "Sensitive data",
          "Type": "Automated"
        }
      ],
      "Checks": [
        "sns_topics_kms_encryption_at_rest_enabled",
        "athena_workgroup_encryption",
        "cloudtrail_kms_encryption_enabled",
        "dynamodb_accelerator_cluster_encryption_enabled",
        "dynamodb_tables_kms_cmk_encryption_enabled",
        "efs_encryption_at_rest_enabled",
        "opensearch_service_domains_encryption_at_rest_enabled"
      ]
    },
    {
      "Id": "SDAT-003",
      "Name": "Only use protocols with encryption when transmitting sensitive data outside of your VPC",
      "Description": "Encryption maintains data confidentiality even when the network through which it is transmitted becomes compromised.",
      "Attributes": [
        {
          "Section": "Architectural and Operational Controls",
|
||||
"Subsection": "Sensitive data",
|
||||
"Type": "Manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
},
|
||||
{
|
||||
"Id": "RCVP-001",
|
||||
"Name": "Establish a process to ensure that all required compliance standards are met",
|
||||
"Description": "If you advertise that your product meets specific compliance standards, you must have an internal process for ensuring compliance. Examples of compliance standards include Payment Card Industry Data Security Standard (PCI DSS) PCI DSS, Federal Risk and Authorization Management Program (FedRAMP)FedRAMP, and U.S. Health Insurance Portability and Accountability Act (HIPAA)HIPAA. Applicable compliance standards are determined by various factors, such as what types of data the solution stores or transmits and which geographic regions the solution supports.",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Architectural and Operational Controls",
|
||||
"Subsection": "Regulatory compliance validation process",
|
||||
"Type": "Manual"
|
||||
}
|
||||
],
|
||||
"Checks": []
|
||||
}
|
||||
]
|
||||
}
|
||||
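The external ID controls above (CAA-002 through CAA-006) reduce to one mechanical pattern: the partner generates the ID, and the customer bakes it into the role's trust policy as an sts:ExternalId condition. A minimal Python sketch of that pattern follows; the partner account ID is a placeholder and the surrounding role wiring is an assumption, not part of the FTR text:

import json
import uuid

# Partner side: generate the external ID once per customer (CAA-004/CAA-005),
# never accept one supplied by the customer, and expose it read-only (CAA-006).
external_id = str(uuid.uuid4())

# Trust policy the customer attaches to the cross-account role (CAA-002):
# the partner principal can assume the role only when it presents the
# matching external ID, which addresses the confused deputy problem.
trust_policy = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Principal": {"AWS": "arn:aws:iam::111122223333:root"},  # partner account (placeholder)
            "Action": "sts:AssumeRole",
            "Condition": {"StringEquals": {"sts:ExternalId": external_id}},
        }
    ],
}
print(json.dumps(trust_policy, indent=2))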
@@ -29,7 +29,8 @@
 "securityhub_enabled",
 "elbv2_waf_acl_attached",
 "guardduty_is_enabled",
-"inspector2_findings_exist",
+"inspector2_is_enabled",
+"inspector2_active_findings_exist",
 "awslambda_function_not_publicly_accessible",
 "ec2_instance_public_ip"
 ],
@@ -576,7 +577,8 @@
 "config_recorder_all_regions_enabled",
 "securityhub_enabled",
 "guardduty_is_enabled",
-"inspector2_findings_exist"
+"inspector2_is_enabled",
+"inspector2_active_findings_exist"
 ],
 "Attributes": [
 {
@@ -737,7 +739,8 @@
 "iam_user_hardware_mfa_enabled",
 "iam_user_mfa_enabled_console_access",
 "securityhub_enabled",
-"inspector2_findings_exist"
+"inspector2_is_enabled",
+"inspector2_active_findings_exist"
 ],
 "Attributes": [
 {
@@ -1892,7 +1895,8 @@
 "networkfirewall_in_all_vpc",
 "elbv2_waf_acl_attached",
 "guardduty_is_enabled",
-"inspector2_findings_exist",
+"inspector2_is_enabled",
+"inspector2_active_findings_exist",
 "ec2_networkacl_allow_ingress_any_port",
 "ec2_networkacl_allow_ingress_tcp_port_22",
 "ec2_networkacl_allow_ingress_tcp_port_3389",
@@ -13,7 +13,7 @@
 "ItemId": "cc_1_1",
 "Section": "CC1.0 - Common Criteria Related to Control Environment",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -27,7 +27,7 @@
 "ItemId": "cc_1_2",
 "Section": "CC1.0 - Common Criteria Related to Control Environment",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -41,7 +41,7 @@
 "ItemId": "cc_1_3",
 "Section": "CC1.0 - Common Criteria Related to Control Environment",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -62,7 +62,7 @@
 "ItemId": "cc_1_4",
 "Section": "CC1.0 - Common Criteria Related to Control Environment",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -76,7 +76,7 @@
 "ItemId": "cc_1_5",
 "Section": "CC1.0 - Common Criteria Related to Control Environment",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -90,7 +90,7 @@
 "ItemId": "cc_2_1",
 "Section": "CC2.0 - Common Criteria Related to Communication and Information",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -109,7 +109,7 @@
 "ItemId": "cc_2_2",
 "Section": "CC2.0 - Common Criteria Related to Communication and Information",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -123,7 +123,7 @@
 "ItemId": "cc_2_3",
 "Section": "CC2.0 - Common Criteria Related to Communication and Information",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -137,7 +137,7 @@
 "ItemId": "cc_3_1",
 "Section": "CC3.0 - Common Criteria Related to Risk Assessment",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -155,7 +155,7 @@
 "ItemId": "cc_3_2",
 "Section": "CC3.0 - Common Criteria Related to Risk Assessment",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -175,7 +175,7 @@
 "ItemId": "cc_3_3",
 "Section": "CC3.0 - Common Criteria Related to Risk Assessment",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -189,7 +189,7 @@
 "ItemId": "cc_3_4",
 "Section": "CC3.0 - Common Criteria Related to Risk Assessment",
 "Service": "config",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -205,7 +205,7 @@
 "ItemId": "cc_4_1",
 "Section": "CC4.0 - Monitoring Activities",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -219,7 +219,7 @@
 "ItemId": "cc_4_2",
 "Section": "CC4.0 - Monitoring Activities",
 "Service": "guardduty",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -236,7 +236,7 @@
 "ItemId": "cc_5_1",
 "Section": "CC5.0 - Control Activities",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -250,7 +250,7 @@
 "ItemId": "cc_5_2",
 "Section": "CC5.0 - Control Activities",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -264,7 +264,7 @@
 "ItemId": "cc_5_3",
 "Section": "CC5.0 - Control Activities",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -278,7 +278,7 @@
 "ItemId": "cc_6_1",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "s3",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -294,7 +294,7 @@
 "ItemId": "cc_6_2",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "rds",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -310,7 +310,7 @@
 "ItemId": "cc_6_3",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "iam",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -328,7 +328,7 @@
 "ItemId": "cc_6_4",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -342,7 +342,7 @@
 "ItemId": "cc_6_5",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -356,7 +356,7 @@
 "ItemId": "cc_6_6",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "ec2",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -372,7 +372,7 @@
 "ItemId": "cc_6_7",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "acm",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -388,7 +388,7 @@
 "ItemId": "cc_6_8",
 "Section": "CC6.0 - Logical and Physical Access",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -405,7 +405,7 @@
 "ItemId": "cc_7_1",
 "Section": "CC7.0 - System Operations",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -424,7 +424,7 @@
 "ItemId": "cc_7_2",
 "Section": "CC7.0 - System Operations",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -460,7 +460,7 @@
 "ItemId": "cc_7_3",
 "Section": "CC7.0 - System Operations",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -492,7 +492,7 @@
 "ItemId": "cc_7_4",
 "Section": "CC7.0 - System Operations",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -523,7 +523,7 @@
 "ItemId": "cc_7_5",
 "Section": "CC7.0 - System Operations",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -537,7 +537,7 @@
 "ItemId": "cc_8_1",
 "Section": "CC8.0 - Change Management",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -553,7 +553,7 @@
 "ItemId": "cc_9_1",
 "Section": "CC9.0 - Risk Mitigation",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -567,7 +567,7 @@
 "ItemId": "cc_9_2",
 "Section": "CC9.0 - Risk Mitigation",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -581,7 +581,7 @@
 "ItemId": "cc_a_1_1",
 "Section": "CCA1.0 - Additional Criterial for Availability",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -595,7 +595,7 @@
 "ItemId": "cc_a_1_2",
 "Section": "CCA1.0 - Additional Criterial for Availability",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -626,7 +626,7 @@
 "ItemId": "cc_a_1_3",
 "Section": "CCA1.0 - Additional Criterial for Availability",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -640,7 +640,7 @@
 "ItemId": "cc_c_1_1",
 "Section": "CCC1.0 - Additional Criterial for Confidentiality",
 "Service": "aws",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -656,7 +656,7 @@
 "ItemId": "cc_c_1_2",
 "Section": "CCC1.0 - Additional Criterial for Confidentiality",
 "Service": "s3",
-"Soc_Type": "automated"
+"Type": "automated"
 }
 ],
 "Checks": [
@@ -672,7 +672,7 @@
 "ItemId": "p_1_1",
 "Section": "P1.0 - Privacy Criteria Related to Notice and Communication of Objectives Related to Privacy",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -686,7 +686,7 @@
 "ItemId": "p_2_1",
 "Section": "P2.0 - Privacy Criteria Related to Choice and Consent",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -700,7 +700,7 @@
 "ItemId": "p_3_1",
 "Section": "P3.0 - Privacy Criteria Related to Collection",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -714,7 +714,7 @@
 "ItemId": "p_3_2",
 "Section": "P3.0 - Privacy Criteria Related to Collection",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -728,7 +728,7 @@
 "ItemId": "p_4_1",
 "Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -742,7 +742,7 @@
 "ItemId": "p_4_2",
 "Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -756,7 +756,7 @@
 "ItemId": "p_4_3",
 "Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -770,7 +770,7 @@
 "ItemId": "p_5_1",
 "Section": "P5.0 - Privacy Criteria Related to Access",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -784,7 +784,7 @@
 "ItemId": "p_5_2",
 "Section": "P5.0 - Privacy Criteria Related to Access",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -798,7 +798,7 @@
 "ItemId": "p_6_1",
 "Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -812,7 +812,7 @@
 "ItemId": "p_6_2",
 "Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -826,7 +826,7 @@
 "ItemId": "p_6_3",
 "Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -840,7 +840,7 @@
 "ItemId": "p_6_4",
 "Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -854,7 +854,7 @@
 "ItemId": "p_6_5",
 "Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -868,7 +868,7 @@
 "ItemId": "p_6_6",
 "Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -882,7 +882,7 @@
 "ItemId": "p_6_7",
 "Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -896,7 +896,7 @@
 "ItemId": "p_7_1",
 "Section": "P7.0 - Privacy Criteria Related to Quality",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -910,7 +910,7 @@
 "ItemId": "p_8_1",
 "Section": "P8.0 - Privacy Criteria Related to Monitoring and Enforcement",
 "Service": "aws",
-"Soc_Type": "manual"
+"Type": "manual"
 }
 ],
 "Checks": []
@@ -38,6 +38,9 @@ Allowlist:
 - "aws-controltower-ReadOnlyExecutionRole"
 - "AWSControlTower_VPCFlowLogsRole"
 - "AWSControlTowerExecution"
+- "AWSAFTAdmin"
+- "AWSAFTExecution"
+- "AWSAFTService"
 "iam_policy_*":
 Regions:
 - "*"
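The three AWSAFT* additions read as extending the existing Control Tower entries to the roles provisioned by AWS Control Tower Account Factory for Terraform (AFT), so findings against those managed roles are allowlisted the same way (our reading of the change; the diff itself carries no commentary).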
@@ -11,7 +11,7 @@ from prowler.lib.logger import logger

 timestamp = datetime.today()
 timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
-prowler_version = "3.11.0"
+prowler_version = "3.15.0"
 html_logo_url = "https://github.com/prowler-cloud/prowler/"
 html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
 square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
@@ -22,6 +22,9 @@ gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accd
 orange_color = "\033[38;5;208m"
 banner_color = "\033[1;92m"

+# Severities
+valid_severities = ["critical", "high", "medium", "low", "informational"]
+
 # Compliance
 actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))

@@ -70,7 +73,9 @@ def check_current_version():
         if latest_version != prowler_version:
             return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
         else:
-            return f"{prowler_version_string} (it is the latest version, yay!)"
+            return (
+                f"{prowler_version_string} (You are running the latest version, yay!)"
+            )
     except requests.RequestException:
         return f"{prowler_version_string}"
     except Exception:
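The check_current_version change above only rewords the up-to-date message, but the surrounding logic is worth seeing whole. A sketch of the pattern, assuming PyPI's JSON API as the release source; the real implementation lives in prowler/config/config.py and may fetch the version differently:

import requests

prowler_version = "3.15.0"


def check_current_version_sketch() -> str:
    """Compare the local version against the newest release on PyPI."""
    prowler_version_string = f"Prowler {prowler_version}"
    try:
        # Assumed endpoint/shape: PyPI exposes the latest version under info.version.
        release = requests.get("https://pypi.org/pypi/prowler/json", timeout=1)
        latest_version = release.json()["info"]["version"]
        if latest_version != prowler_version:
            return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
        return f"{prowler_version_string} (You are running the latest version, yay!)"
    except requests.RequestException:
        # Offline or PyPI unreachable: report the local version only.
        return prowler_version_string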
@@ -2,7 +2,7 @@
 aws:

   # AWS Global Configuration
-  # aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
+  # aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
   allowlist_non_default_regions: False
   # If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`:
   # Allowlist:
@@ -69,8 +69,8 @@ aws:
   # AWS Organizations
   # organizations_scp_check_deny_regions
   # organizations_enabled_regions: [
-  #   'eu-central-1',
-  #   'eu-west-1',
+  #   "eu-central-1",
+  #   "eu-west-1",
   #   "us-east-1"
   # ]
   organizations_enabled_regions: []
@@ -89,6 +89,20 @@ aws:
+
+# Azure Configuration
+azure:
+  # Azure Network Configuration
+  # azure.network_public_ip_shodan
+  shodan_api_key: null
+
+  # Azure App Service
+  # azure.app_ensure_php_version_is_latest
+  php_latest_version: "8.2"
+  # azure.app_ensure_python_version_is_latest
+  python_latest_version: "3.12"
+  # azure.app_ensure_java_version_is_latest
+  java_latest_version: "17"

 # GCP Configuration
 gcp:
   # GCP Compute Configuration
   # gcp.compute_public_address_shodan
   shodan_api_key: null
prowler/config/custom_checks_metadata_example.yaml (new file, 15 lines)
@@ -0,0 +1,15 @@
CustomChecksMetadata:
  aws:
    Checks:
      s3_bucket_level_public_access_block:
        Severity: high
      s3_bucket_no_mfa_delete:
        Severity: high
  azure:
    Checks:
      storage_infrastructure_encryption_is_enabled:
        Severity: medium
  gcp:
    Checks:
      compute_instance_public_ip:
        Severity: critical
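In practice this example file is passed with the new flag introduced later in this changeset, for example `prowler aws --custom-checks-metadata-file prowler/config/custom_checks_metadata_example.yaml`, which overrides the listed checks' Severity in every output format.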
@@ -4,7 +4,7 @@ from prowler.config.config import banner_color, orange_color, prowler_version, t


 def print_banner(args):
-    banner = f"""{banner_color} _
+    banner = rf"""{banner_color} _
 _ __ _ __ _____ _| | ___ _ __
 | '_ \| '__/ _ \ \ /\ / / |/ _ \ '__|
 | |_) | | | (_) \ V V /| | __/ |
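The only substantive change here is the `rf` prefix: treating the banner as a raw f-string stops Python from interpreting the backslashes in the ASCII art as escape sequences, which newer Python versions flag with invalid-escape-sequence warnings.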
@@ -16,6 +16,7 @@ from colorama import Fore, Style
 import prowler
 from prowler.config.config import orange_color
 from prowler.lib.check.compliance_models import load_compliance_framework
+from prowler.lib.check.custom_checks_metadata import update_check_metadata
 from prowler.lib.check.models import Check, load_check_metadata
 from prowler.lib.logger import logger
 from prowler.lib.outputs.outputs import report
@@ -66,9 +67,9 @@ def bulk_load_compliance_frameworks(provider: str) -> dict:
             # cis_v1.4_aws.json --> cis_v1.4_aws
             compliance_framework_name = filename.split(".json")[0]
             # Store the compliance info
-            bulk_compliance_frameworks[
-                compliance_framework_name
-            ] = load_compliance_framework(file_path)
+            bulk_compliance_frameworks[compliance_framework_name] = (
+                load_compliance_framework(file_path)
+            )
     except Exception as e:
         logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
@@ -106,14 +107,20 @@ def exclude_services_to_run(

 # Load checks from checklist.json
 def parse_checks_from_file(input_file: str, provider: str) -> set:
-    checks_to_execute = set()
-    with open_file(input_file) as f:
-        json_file = parse_json_file(f)
+    """parse_checks_from_file returns a set of checks read from the given file"""
+    try:
+        checks_to_execute = set()
+        with open_file(input_file) as f:
+            json_file = parse_json_file(f)

-        for check_name in json_file[provider]:
-            checks_to_execute.add(check_name)
+        for check_name in json_file[provider]:
+            checks_to_execute.add(check_name)

-    return checks_to_execute
+        return checks_to_execute
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
+        )


 # Load checks from custom folder
@@ -210,7 +217,7 @@ def print_categories(categories: set):
     singular_string = f"\nThere is {Fore.YELLOW}{categories_num}{Style.RESET_ALL} available category.\n"

     message = plural_string if categories_num > 1 else singular_string
-    for category in categories:
+    for category in sorted(categories):
         print(f"- {category}")

     print(message)
@@ -239,7 +246,7 @@ def print_compliance_frameworks(
     singular_string = f"\nThere is {Fore.YELLOW}{frameworks_num}{Style.RESET_ALL} available Compliance Framework.\n"
     message = plural_string if frameworks_num > 1 else singular_string

-    for framework in bulk_compliance_frameworks.keys():
+    for framework in sorted(bulk_compliance_frameworks.keys()):
         print(f"- {framework}")

     print(message)
@@ -309,7 +316,7 @@ def print_checks(
 def parse_checks_from_compliance_framework(
     compliance_frameworks: list, bulk_compliance_frameworks: dict
 ) -> list:
-    """Parse checks from compliance frameworks specification"""
+    """parse_checks_from_compliance_framework returns a set of checks from the given compliance_frameworks"""
     checks_to_execute = set()
     try:
         for framework in compliance_frameworks:
@@ -416,6 +423,7 @@ def execute_checks(
     provider: str,
     audit_info: Any,
     audit_output_options: Provider_Output_Options,
+    custom_checks_metadata: Any,
 ) -> list:
     # List to store all the check's findings
     all_findings = []
@@ -461,6 +469,7 @@ def execute_checks(
                     audit_info,
                     services_executed,
                     checks_executed,
+                    custom_checks_metadata,
                 )
                 all_findings.extend(check_findings)

@@ -506,6 +515,7 @@ def execute_checks(
                     audit_info,
                     services_executed,
                     checks_executed,
+                    custom_checks_metadata,
                 )
                 all_findings.extend(check_findings)

@@ -531,6 +541,7 @@ def execute(
     audit_info: Any,
     services_executed: set,
     checks_executed: set,
+    custom_checks_metadata: Any,
 ):
     # Import check module
     check_module_path = (
@@ -541,6 +552,10 @@ def execute(
     check_to_execute = getattr(lib, check_name)
     c = check_to_execute()

+    # Update check metadata to reflect that in the outputs
+    if custom_checks_metadata and custom_checks_metadata["Checks"].get(c.CheckID):
+        c = update_check_metadata(c, custom_checks_metadata["Checks"][c.CheckID])
+
     # Run check
     check_findings = run_check(c, audit_output_options)

@@ -598,22 +613,32 @@ def update_audit_metadata(
     )


-def recover_checks_from_service(service_list: list, provider: str) -> list:
-    checks = set()
-    service_list = [
-        "awslambda" if service == "lambda" else service for service in service_list
-    ]
-    for service in service_list:
-        modules = recover_checks_from_provider(provider, service)
-        if not modules:
-            logger.error(f"Service '{service}' does not have checks.")
+def recover_checks_from_service(service_list: list, provider: str) -> set:
+    """
+    Recover all checks from the selected provider and service

-        else:
-            for check_module in modules:
-                # Recover check name and module name from import path
-                # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
-                check_name = check_module[0].split(".")[-1]
-                # If the service is present in the group list passed as parameters
-                # if service_name in group_list: checks_from_arn.add(check_name)
-                checks.add(check_name)
-    return checks
+    Returns a set of checks from the given services
+    """
+    try:
+        checks = set()
+        service_list = [
+            "awslambda" if service == "lambda" else service for service in service_list
+        ]
+        for service in service_list:
+            service_checks = recover_checks_from_provider(provider, service)
+            if not service_checks:
+                logger.error(f"Service '{service}' does not have checks.")
+
+            else:
+                for check in service_checks:
+                    # Recover check name and module name from import path
+                    # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
+                    check_name = check[0].split(".")[-1]
+                    # If the service is present in the group list passed as parameters
+                    # if service_name in group_list: checks_from_arn.add(check_name)
+                    checks.add(check_name)
+        return checks
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
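A quick usage sketch of the refactored helper; the import path and the resulting check names are assumptions based on the surrounding code, and the actual set depends on the installed providers tree:

# Assumed import path; recover_checks_from_service is defined in
# prowler/lib/check/check.py alongside the functions shown above.
from prowler.lib.check.check import recover_checks_from_service

# "lambda" is mapped to "awslambda" internally because the service package
# cannot be named after the Python keyword.
checks = recover_checks_from_service(["lambda", "s3"], "aws")
print(sorted(checks))  # e.g. includes awslambda_function_not_publicly_accessible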
@@ -1,5 +1,6 @@
 from colorama import Fore, Style

+from prowler.config.config import valid_severities
 from prowler.lib.check.check import (
     parse_checks_from_compliance_framework,
     parse_checks_from_file,
@@ -10,7 +11,6 @@ from prowler.lib.logger import logger


 # Generate the list of checks to execute
-# PENDING Test for this function
 def load_checks_to_execute(
     bulk_checks_metadata: dict,
     bulk_compliance_frameworks: dict,
@@ -22,80 +22,116 @@
     categories: set,
     provider: str,
 ) -> set:
-    """Generate the list of checks to execute based on the cloud provider and input arguments specified"""
-    checks_to_execute = set()
+    """Generate the list of checks to execute based on the cloud provider and the input arguments given"""
+    try:
+        # Local subsets
+        checks_to_execute = set()
+        check_aliases = {}
+        check_severities = {key: [] for key in valid_severities}
+        check_categories = {}

-    # Handle if there are checks passed using -c/--checks
-    if check_list:
-        for check_name in check_list:
-            checks_to_execute.add(check_name)
+        # First, loop over the bulk_checks_metadata to extract the needed subsets
+        for check, metadata in bulk_checks_metadata.items():
+            try:
+                # Aliases
+                for alias in metadata.CheckAliases:
+                    if alias not in check_aliases:
+                        check_aliases[alias] = []
+                    check_aliases[alias].append(check)

-    # Handle if there are some severities passed using --severity
-    elif severities:
-        for check in bulk_checks_metadata:
-            # Check check's severity
-            if bulk_checks_metadata[check].Severity in severities:
-                checks_to_execute.add(check)
+                # Severities
+                if metadata.Severity:
+                    check_severities[metadata.Severity].append(check)

-    # Handle if there are checks passed using -C/--checks-file
-    elif checks_file:
-        try:
+                # Categories
+                for category in metadata.Categories:
+                    if category not in check_categories:
+                        check_categories[category] = []
+                    check_categories[category].append(check)
+            except Exception as error:
+                logger.error(
+                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
+                )
+
+        # Handle if there are checks passed using -c/--checks
+        if check_list:
+            for check_name in check_list:
+                checks_to_execute.add(check_name)
+
+        # Handle if there are some severities passed using --severity
+        elif severities:
+            for severity in severities:
+                checks_to_execute.update(check_severities[severity])
+
+            if service_list:
+                checks_to_execute = (
+                    recover_checks_from_service(service_list, provider)
+                    & checks_to_execute
+                )
+
+        # Handle if there are checks passed using -C/--checks-file
+        elif checks_file:
             checks_to_execute = parse_checks_from_file(checks_file, provider)
-        except Exception as e:
-            logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")

-    # Handle if there are services passed using -s/--services
-    elif service_list:
-        checks_to_execute = recover_checks_from_service(service_list, provider)
+        # Handle if there are services passed using -s/--services
+        elif service_list:
+            checks_to_execute = recover_checks_from_service(service_list, provider)

-    # Handle if there are compliance frameworks passed using --compliance
-    elif compliance_frameworks:
-        try:
+        # Handle if there are compliance frameworks passed using --compliance
+        elif compliance_frameworks:
             checks_to_execute = parse_checks_from_compliance_framework(
                 compliance_frameworks, bulk_compliance_frameworks
             )
-        except Exception as e:
-            logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")

-    # Handle if there are categories passed using --categories
-    elif categories:
-        for cat in categories:
-            for check in bulk_checks_metadata:
-                # Check check's categories
-                if cat in bulk_checks_metadata[check].Categories:
-                    checks_to_execute.add(check)
+        # Handle if there are categories passed using --categories
+        elif categories:
+            for category in categories:
+                checks_to_execute.update(check_categories[category])

-    # If there are no checks passed as argument
-    else:
-        try:
+        # If there are no checks passed as argument
+        else:
             # Get all check modules to run with the specific provider
             checks = recover_checks_from_provider(provider)
-        except Exception as e:
-            logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
-        else:

             for check_info in checks:
                 # Recover check name from import path (last part)
                 # Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
                 check_name = check_info[0]
                 checks_to_execute.add(check_name)

-    # Get Check Aliases mapping
-    check_aliases = {}
-    for check, metadata in bulk_checks_metadata.items():
-        for alias in metadata.CheckAliases:
-            check_aliases[alias] = check
         # Check Aliases
         checks_to_execute = update_checks_to_execute_with_aliases(
             checks_to_execute, check_aliases
         )

         return checks_to_execute
+
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
+        )
+        return checks_to_execute


 def update_checks_to_execute_with_aliases(
     checks_to_execute: set, check_aliases: dict
 ) -> set:
     """update_checks_to_execute_with_aliases returns the checks_to_execute updated using the check aliases."""
-    # Verify if any input check is an alias of another check
-    for input_check in checks_to_execute:
-        if (
-            input_check in check_aliases
-            and check_aliases[input_check] not in checks_to_execute
-        ):
-            # Remove input check name and add the real one
-            checks_to_execute.remove(input_check)
-            checks_to_execute.add(check_aliases[input_check])
-            print(
-                f"\nUsing alias {Fore.YELLOW}{input_check}{Style.RESET_ALL} for check {Fore.YELLOW}{check_aliases[input_check]}{Style.RESET_ALL}...\n"
-            )
-
-    return checks_to_execute
+    try:
+        new_checks_to_execute = checks_to_execute.copy()
+        for input_check in checks_to_execute:
+            if input_check in check_aliases:
+                # Remove input check name and add the real one
+                new_checks_to_execute.remove(input_check)
+                for alias in check_aliases[input_check]:
+                    if alias not in new_checks_to_execute:
+                        new_checks_to_execute.add(alias)
+                        print(
+                            f"\nUsing alias {Fore.YELLOW}{input_check}{Style.RESET_ALL} for check {Fore.YELLOW}{alias}{Style.RESET_ALL}..."
+                        )
+        return new_checks_to_execute
+    except Exception as error:
+        logger.error(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
+        )
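The new alias resolution is easiest to read with toy data; the alias and check names below are invented for illustration:

# Assumed import path for the module shown above.
from prowler.lib.check.checks_loader import update_checks_to_execute_with_aliases

requested = {"lambda_public_access", "s3_bucket_public_access"}
aliases = {"lambda_public_access": ["awslambda_function_not_publicly_accessible"]}

resolved = update_checks_to_execute_with_aliases(requested, aliases)
# resolved == {"awslambda_function_not_publicly_accessible", "s3_bucket_public_access"},
# and a "Using alias ..." notice is printed for the translated check.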
@@ -53,14 +53,14 @@ def update_checks_metadata_with_compliance(
                 check_compliance.append(compliance)
     # Create metadata for Manual Control
     manual_check_metadata = {
-        "Provider": "aws",
+        "Provider": framework.Provider.lower(),
         "CheckID": "manual_check",
         "CheckTitle": "Manual Check",
         "CheckType": [],
         "ServiceName": "",
         "SubServiceName": "",
         "ResourceIdTemplate": "",
-        "Severity": "",
+        "Severity": "low",
         "ResourceType": "",
         "Description": "",
         "Risk": "",
@@ -52,12 +52,12 @@ class ENS_Requirement_Attribute(BaseModel):
 class Generic_Compliance_Requirement_Attribute(BaseModel):
     """Generic Compliance Requirement Attribute"""

-    ItemId: str
+    ItemId: Optional[str]
     Section: Optional[str]
     SubSection: Optional[str]
     SubGroup: Optional[str]
-    Service: str
-    Soc_Type: Optional[str]
+    Service: Optional[str]
+    Type: Optional[str]


 class CIS_Requirement_Attribute_Profile(str):
@@ -151,9 +151,9 @@ class Compliance_Requirement(BaseModel):
         Union[
             CIS_Requirement_Attribute,
             ENS_Requirement_Attribute,
-            Generic_Compliance_Requirement_Attribute,
             ISO27001_2013_Requirement_Attribute,
             AWS_Well_Architected_Requirement_Attribute,
+            Generic_Compliance_Requirement_Attribute,
         ]
     ]
     Checks: list[str]
prowler/lib/check/custom_checks_metadata.py (new file, 77 lines)
@@ -0,0 +1,77 @@
import sys

import yaml
from jsonschema import validate

from prowler.config.config import valid_severities
from prowler.lib.logger import logger

custom_checks_metadata_schema = {
    "type": "object",
    "properties": {
        "Checks": {
            "type": "object",
            "patternProperties": {
                ".*": {
                    "type": "object",
                    "properties": {
                        "Severity": {
                            "type": "string",
                            "enum": valid_severities,
                        }
                    },
                    "required": ["Severity"],
                    "additionalProperties": False,
                }
            },
            "additionalProperties": False,
        }
    },
    "required": ["Checks"],
    "additionalProperties": False,
}


def parse_custom_checks_metadata_file(provider: str, parse_custom_checks_metadata_file):
    """parse_custom_checks_metadata_file returns the custom_checks_metadata object if it is valid, otherwise aborts the execution returning the ValidationError."""
    try:
        with open(parse_custom_checks_metadata_file) as f:
            custom_checks_metadata = yaml.safe_load(f)["CustomChecksMetadata"][provider]
        validate(custom_checks_metadata, schema=custom_checks_metadata_schema)
        return custom_checks_metadata
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def update_checks_metadata(bulk_checks_metadata, custom_checks_metadata):
    """update_checks_metadata returns the bulk_checks_metadata with the check's metadata updated based on the custom_checks_metadata provided."""
    try:
        # Update checks metadata from CustomChecksMetadata file
        for check, custom_metadata in custom_checks_metadata["Checks"].items():
            check_metadata = bulk_checks_metadata.get(check)
            if check_metadata:
                bulk_checks_metadata[check] = update_check_metadata(
                    check_metadata, custom_metadata
                )
        return bulk_checks_metadata
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def update_check_metadata(check_metadata, custom_metadata):
    """update_check_metadata updates the check_metadata fields present in the custom_metadata and returns the updated version of the check_metadata. If some field is not present or valid the check_metadata is returned with the original fields."""
    try:
        if custom_metadata:
            for attribute in custom_metadata:
                try:
                    setattr(check_metadata, attribute, custom_metadata[attribute])
                except ValueError:
                    pass
    finally:
        return check_metadata
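To see update_check_metadata in isolation, a toy stand-in works; note that with the real pydantic-backed Check_Metadata_Model an invalid value raises ValueError inside setattr and is silently skipped, which a plain class will not reproduce:

from prowler.lib.check.custom_checks_metadata import update_check_metadata


class ToyMetadata:
    """Stand-in for Check_Metadata_Model; illustrative only."""

    Severity = "medium"


meta = update_check_metadata(ToyMetadata(), {"Severity": "high"})
print(meta.Severity)  # "high"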
@@ -1,10 +1,12 @@
 import os
+import re
 import sys
 from abc import ABC, abstractmethod
 from dataclasses import dataclass

-from pydantic import BaseModel, ValidationError
+from pydantic import BaseModel, ValidationError, validator

+from prowler.config.config import valid_severities
 from prowler.lib.logger import logger

@@ -56,6 +58,29 @@ class Check_Metadata_Model(BaseModel):
     # store the compliance later if supplied
     Compliance: list = None

+    @validator("Categories", each_item=True, pre=True, always=True)
+    def valid_category(value):
+        if not isinstance(value, str):
+            raise ValueError("Categories must be a list of strings")
+        value_lower = value.lower()
+        if not re.match("^[a-z-]+$", value_lower):
+            raise ValueError(
+                f"Invalid category: {value}. Categories can only contain lowercase letters and hyphen '-'"
+            )
+        return value_lower
+
+    @validator("Severity", pre=True, always=True)
+    def severity_to_lower(severity):
+        return severity.lower()
+
+    @validator("Severity")
+    def valid_severity(severity):
+        if severity not in valid_severities:
+            raise ValueError(
+                f"Invalid severity: {severity}. Severity must be one of {', '.join(valid_severities)}"
+            )
+        return severity
+

 class Check(ABC, Check_Metadata_Model):
     """Prowler Check"""
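The validators above are pydantic v1 style; a self-contained sketch of the same pattern shows the normalize-then-validate chaining:

from pydantic import BaseModel, validator

VALID_SEVERITIES = ["critical", "high", "medium", "low", "informational"]


class ToyCheckMetadata(BaseModel):
    Severity: str

    @validator("Severity", pre=True, always=True)
    def severity_to_lower(cls, severity):
        # pre=True runs before validation proper, so "HIGH" becomes "high" first.
        return severity.lower()

    @validator("Severity")
    def valid_severity(cls, severity):
        if severity not in VALID_SEVERITIES:
            raise ValueError(f"Invalid severity: {severity}")
        return severity


print(ToyCheckMetadata(Severity="HIGH").Severity)  # "high"
# ToyCheckMetadata(Severity="urgent") would raise a ValidationError.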
@@ -7,6 +7,7 @@ from prowler.config.config import (
     check_current_version,
     default_config_file_path,
     default_output_directory,
+    valid_severities,
 )
 from prowler.providers.common.arguments import (
     init_providers_parser,
@@ -49,6 +50,7 @@ Detailed documentation at https://docs.prowler.cloud
         self.__init_exclude_checks_parser__()
         self.__init_list_checks_parser__()
         self.__init_config_parser__()
+        self.__init_custom_checks_metadata_parser__()

         # Init Providers Arguments
         init_providers_parser(self)
@@ -220,11 +222,11 @@ Detailed documentation at https://docs.prowler.cloud
         group.add_argument(
             "-s", "--services", nargs="+", help="List of services to be executed."
         )
-        group.add_argument(
+        common_checks_parser.add_argument(
             "--severity",
             nargs="+",
-            help="List of severities to be executed [informational, low, medium, high, critical]",
-            choices=["informational", "low", "medium", "high", "critical"],
+            help=f"List of severities to be executed {valid_severities}",
+            choices=valid_severities,
         )
         group.add_argument(
             "--compliance",
@@ -286,3 +288,15 @@ Detailed documentation at https://docs.prowler.cloud
             default=default_config_file_path,
             help="Set configuration file path",
         )
+
+    def __init_custom_checks_metadata_parser__(self):
+        # CustomChecksMetadata
+        custom_checks_metadata_subparser = (
+            self.common_providers_parser.add_argument_group("Custom Checks Metadata")
+        )
+        custom_checks_metadata_subparser.add_argument(
+            "--custom-checks-metadata-file",
+            nargs="?",
+            default=None,
+            help="Path for the custom checks metadata YAML file. See example prowler/config/custom_checks_metadata_example.yaml for reference and format. See more in https://docs.prowler.cloud/en/latest/tutorials/custom-checks-metadata/",
+        )
@@ -330,7 +330,7 @@ def fill_compliance(output_options, finding, audit_info, file_descriptors):
                     Requirements_Attributes_SubSection=attribute.SubSection,
                     Requirements_Attributes_SubGroup=attribute.SubGroup,
                     Requirements_Attributes_Service=attribute.Service,
-                    Requirements_Attributes_Soc_Type=attribute.Soc_Type,
+                    Requirements_Attributes_Type=attribute.Type,
                     Status=finding.status,
                     StatusExtended=finding.status_extended,
                     ResourceId=finding.resource_id,
@@ -401,7 +401,8 @@ def display_compliance_table(
                     "Bajo": 0,
                 }
             if finding.status == "FAIL":
-                fail_count += 1
+                if attribute.Tipo != "recomendacion":
+                    fail_count += 1
                 marcos[marco_categoria][
                     "Estado"
                 ] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}"
@@ -443,8 +444,8 @@ def display_compliance_table(
         )
         overview_table = [
             [
-                f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) NO CUMPLE{Style.RESET_ALL}",
-                f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) CUMPLE{Style.RESET_ALL}",
+                f"{Fore.RED}{round(fail_count / (fail_count + pass_count) * 100, 2)}% ({fail_count}) NO CUMPLE{Style.RESET_ALL}",
+                f"{Fore.GREEN}{round(pass_count / (fail_count + pass_count) * 100, 2)}% ({pass_count}) CUMPLE{Style.RESET_ALL}",
             ]
         ]
         print(tabulate(overview_table, tablefmt="rounded_grid"))
@@ -538,8 +539,8 @@ def display_compliance_table(
         )
         overview_table = [
             [
-                f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}",
-                f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}",
+                f"{Fore.RED}{round(fail_count / (fail_count + pass_count) * 100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}",
+                f"{Fore.GREEN}{round(pass_count / (fail_count + pass_count) * 100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}",
            ]
         ]
         print(tabulate(overview_table, tablefmt="rounded_grid"))
@@ -609,8 +610,8 @@ def display_compliance_table(
         )
         overview_table = [
             [
-                f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}",
-                f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}",
+                f"{Fore.RED}{round(fail_count / (fail_count + pass_count) * 100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}",
+                f"{Fore.GREEN}{round(pass_count / (fail_count + pass_count) * 100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}",
             ]
         ]
         print(tabulate(overview_table, tablefmt="rounded_grid"))
@@ -12,8 +12,6 @@ from prowler.config.config import (
 from prowler.lib.logger import logger
 from prowler.lib.outputs.html import add_html_header
 from prowler.lib.outputs.models import (
-    Aws_Check_Output_CSV,
-    Azure_Check_Output_CSV,
     Check_Output_CSV_AWS_CIS,
     Check_Output_CSV_AWS_ISO27001_2013,
     Check_Output_CSV_AWS_Well_Architected,
@@ -21,19 +19,18 @@ from prowler.lib.outputs.models import (
     Check_Output_CSV_GCP_CIS,
     Check_Output_CSV_Generic_Compliance,
     Check_Output_MITRE_ATTACK,
-    Gcp_Check_Output_CSV,
     generate_csv_fields,
 )
 from prowler.lib.utils.utils import file_exists, open_file
 from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
 from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
+from prowler.providers.common.outputs import get_provider_output_model
 from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info


 def initialize_file_descriptor(
     filename: str,
     output_mode: str,
-    audit_info: AWS_Audit_Info,
+    audit_info: Any,
     format: Any = None,
 ) -> TextIOWrapper:
     """Open/Create the output file. If needed include headers or the required format"""
@@ -75,27 +72,15 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
     for output_mode in output_modes:
         if output_mode == "csv":
             filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
-            if isinstance(audit_info, AWS_Audit_Info):
-                file_descriptor = initialize_file_descriptor(
-                    filename,
-                    output_mode,
-                    audit_info,
-                    Aws_Check_Output_CSV,
-                )
-            if isinstance(audit_info, Azure_Audit_Info):
-                file_descriptor = initialize_file_descriptor(
-                    filename,
-                    output_mode,
-                    audit_info,
-                    Azure_Check_Output_CSV,
-                )
-            if isinstance(audit_info, GCP_Audit_Info):
-                file_descriptor = initialize_file_descriptor(
-                    filename,
-                    output_mode,
-                    audit_info,
-                    Gcp_Check_Output_CSV,
-                )
+            output_model = get_provider_output_model(
+                audit_info.__class__.__name__
+            )
+            file_descriptor = initialize_file_descriptor(
+                filename,
+                output_mode,
+                audit_info,
+                output_model,
+            )
             file_descriptors.update({output_mode: file_descriptor})

         elif output_mode == "json":
@@ -122,8 +107,8 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
                     file_descriptors.update({output_mode: file_descriptor})

                 elif isinstance(audit_info, GCP_Audit_Info):
-                    if output_mode == "cis_2.0_gcp":
-                        filename = f"{output_directory}/{output_filename}_cis_2.0_gcp{csv_file_suffix}"
+                    filename = f"{output_directory}/{output_filename}_{output_mode}{csv_file_suffix}"
+                    if "cis_" in output_mode:
                         file_descriptor = initialize_file_descriptor(
                             filename, output_mode, audit_info, Check_Output_CSV_GCP_CIS
                         )
@@ -136,87 +121,62 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
                         filename, output_mode, audit_info
                     )
                     file_descriptors.update({output_mode: file_descriptor})

-                elif output_mode == "ens_rd2022_aws":
-                    filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename,
-                        output_mode,
-                        audit_info,
-                        Check_Output_CSV_ENS_RD2022,
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
-
-                elif output_mode == "cis_1.5_aws":
-                    filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
-
-                elif output_mode == "cis_1.4_aws":
-                    filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
-
-                elif (
-                    output_mode
-                    == "aws_well_architected_framework_security_pillar_aws"
-                ):
-                    filename = f"{output_directory}/{output_filename}_aws_well_architected_framework_security_pillar_aws{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename,
-                        output_mode,
-                        audit_info,
-                        Check_Output_CSV_AWS_Well_Architected,
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
-
-                elif (
-                    output_mode
-                    == "aws_well_architected_framework_reliability_pillar_aws"
-                ):
-                    filename = f"{output_directory}/{output_filename}_aws_well_architected_framework_reliability_pillar_aws{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename,
-                        output_mode,
-                        audit_info,
-                        Check_Output_CSV_AWS_Well_Architected,
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
-
-                elif output_mode == "iso27001_2013_aws":
-                    filename = f"{output_directory}/{output_filename}_iso27001_2013_aws{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename,
-                        output_mode,
-                        audit_info,
-                        Check_Output_CSV_AWS_ISO27001_2013,
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
-
-                elif output_mode == "mitre_attack_aws":
-                    filename = f"{output_directory}/{output_filename}_mitre_attack_aws{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename,
-                        output_mode,
-                        audit_info,
-                        Check_Output_MITRE_ATTACK,
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
-
-                else:
-                    # Generic Compliance framework
+                else:  # Compliance frameworks
                     filename = f"{output_directory}/{output_filename}_{output_mode}{csv_file_suffix}"
-                    file_descriptor = initialize_file_descriptor(
-                        filename,
-                        output_mode,
-                        audit_info,
-                        Check_Output_CSV_Generic_Compliance,
-                    )
-                    file_descriptors.update({output_mode: file_descriptor})
+                    if output_mode == "ens_rd2022_aws":
+                        file_descriptor = initialize_file_descriptor(
+                            filename,
+                            output_mode,
+                            audit_info,
+                            Check_Output_CSV_ENS_RD2022,
+                        )
+                        file_descriptors.update({output_mode: file_descriptor})
+
+                    elif "cis_" in output_mode:
+                        file_descriptor = initialize_file_descriptor(
+                            filename,
+                            output_mode,
+                            audit_info,
+                            Check_Output_CSV_AWS_CIS,
+                        )
+                        file_descriptors.update({output_mode: file_descriptor})
+
+                    elif "aws_well_architected_framework" in output_mode:
+                        file_descriptor = initialize_file_descriptor(
+                            filename,
+                            output_mode,
+                            audit_info,
+                            Check_Output_CSV_AWS_Well_Architected,
+                        )
+                        file_descriptors.update({output_mode: file_descriptor})
+
+                    elif output_mode == "iso27001_2013_aws":
+                        file_descriptor = initialize_file_descriptor(
+                            filename,
+                            output_mode,
+                            audit_info,
+                            Check_Output_CSV_AWS_ISO27001_2013,
+                        )
+                        file_descriptors.update({output_mode: file_descriptor})
+
+                    elif output_mode == "mitre_attack_aws":
+                        file_descriptor = initialize_file_descriptor(
+                            filename,
+                            output_mode,
+                            audit_info,
+                            Check_Output_MITRE_ATTACK,
+                        )
+                        file_descriptors.update({output_mode: file_descriptor})
+
+                    else:
+                        # Generic Compliance framework
+                        file_descriptor = initialize_file_descriptor(
+                            filename,
+                            output_mode,
+                            audit_info,
+                            Check_Output_CSV_Generic_Compliance,
+                        )
+                        file_descriptors.update({output_mode: file_descriptor})

     except Exception as error:
         logger.error(
@@ -338,8 +338,9 @@ def add_html_footer(output_filename, output_directory):
 def get_aws_html_assessment_summary(audit_info):
     try:
         if isinstance(audit_info, AWS_Audit_Info):
-            if not audit_info.profile:
-                audit_info.profile = "ENV"
+            profile = (
+                audit_info.profile if audit_info.profile is not None else "default"
+            )
             if isinstance(audit_info.audited_regions, list):
                 audited_regions = " ".join(audit_info.audited_regions)
             elif not audit_info.audited_regions:
@@ -361,7 +362,7 @@ def get_aws_html_assessment_summary(audit_info):
                         </li>
                         <li class="list-group-item">
                             <b>AWS-CLI Profile:</b> """
-                + audit_info.profile
+                + profile
                 + """
                         </li>
                         <li class="list-group-item">
@@ -406,7 +407,7 @@ def get_azure_html_assessment_summary(audit_info):
         if isinstance(audit_info, Azure_Audit_Info):
             printed_subscriptions = []
             for key, value in audit_info.identity.subscriptions.items():
-                intermediate = key + " : " + value
+                intermediate = f"{key} : {value}"
                 printed_subscriptions.append(intermediate)

             # check if identity is str(coming from SP) or dict(coming from browser or)
@@ -31,6 +31,7 @@ from prowler.lib.outputs.models import (
     unroll_dict_to_list,
 )
 from prowler.lib.utils.utils import hash_sha512, open_file, outputs_unix_timestamp
+from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info


 def fill_json_asff(finding_output, audit_info, finding, output_options):
@@ -50,9 +51,9 @@ def fill_json_asff(finding_output, audit_info, finding, output_options):
     finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID
     finding_output.AwsAccountId = audit_info.audited_account
     finding_output.Types = finding.check_metadata.CheckType
-    finding_output.FirstObservedAt = (
-        finding_output.UpdatedAt
-    ) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+    finding_output.FirstObservedAt = finding_output.UpdatedAt = (
+        finding_output.CreatedAt
+    ) = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
     finding_output.Severity = Severity(
         Label=finding.check_metadata.Severity.upper()
     )
@@ -155,6 +156,11 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS
     aws_org_uid = ""
     account = None
     org = None
+    profile = ""
+    if isinstance(audit_info, AWS_Audit_Info):
+        profile = (
+            audit_info.profile if audit_info.profile is not None else "default"
+        )
     if (
         hasattr(audit_info, "organizations_metadata")
         and audit_info.organizations_metadata
@@ -249,9 +255,7 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS
             original_time=outputs_unix_timestamp(
                 output_options.unix_timestamp, timestamp
             ),
-            profiles=[audit_info.profile]
-            if hasattr(audit_info, "organizations_metadata")
-            else [],
+            profiles=[profile],
         )
         compliance = Compliance_OCSF(
             status=generate_json_ocsf_status(finding.status),

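The reflowed chained assignment above is purely cosmetic; the semantics are unchanged. A tiny standalone check (plain Python, nothing Prowler-specific):

from datetime import datetime, timezone

timestamp_utc = datetime.now(timezone.utc)
# One strftime value fans out to all three ASFF time fields; the
# parentheses only affect formatting, not evaluation order.
first_observed_at = updated_at = created_at = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
print(first_observed_at == updated_at == created_at)  # True
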
@@ -55,9 +55,9 @@ def generate_provider_output_csv(
         data["resource_name"] = finding.resource_name
         data["subscription"] = finding.subscription
         data["tenant_domain"] = audit_info.identity.domain
-        data[
-            "finding_unique_id"
-        ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
+        data["finding_unique_id"] = (
+            f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
+        )
         data["compliance"] = unroll_dict(
             get_check_compliance(finding, provider, output_options)
         )
@@ -68,9 +68,9 @@ def generate_provider_output_csv(
         data["resource_name"] = finding.resource_name
         data["project_id"] = finding.project_id
         data["location"] = finding.location.lower()
-        data[
-            "finding_unique_id"
-        ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
+        data["finding_unique_id"] = (
+            f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
+        )
         data["compliance"] = unroll_dict(
             get_check_compliance(finding, provider, output_options)
         )
@@ -82,9 +82,9 @@ def generate_provider_output_csv(
         data["region"] = finding.region
         data["resource_id"] = finding.resource_id
         data["resource_arn"] = finding.resource_arn
-        data[
-            "finding_unique_id"
-        ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
+        data["finding_unique_id"] = (
+            f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
+        )
         data["compliance"] = unroll_dict(
             get_check_compliance(finding, provider, output_options)
         )
@@ -614,8 +614,8 @@ class Check_Output_CSV_Generic_Compliance(BaseModel):
     Requirements_Attributes_Section: Optional[str]
     Requirements_Attributes_SubSection: Optional[str]
     Requirements_Attributes_SubGroup: Optional[str]
-    Requirements_Attributes_Service: str
-    Requirements_Attributes_Soc_Type: Optional[str]
+    Requirements_Attributes_Service: Optional[str]
+    Requirements_Attributes_Type: Optional[str]
     Status: str
     StatusExtended: str
     ResourceId: str

@@ -13,7 +13,7 @@ def send_slack_message(token, channel, stats, provider, audit_info):
         response = client.chat_postMessage(
             username="Prowler",
             icon_url=square_logo_img,
-            channel="#" + channel,
+            channel=f"#{channel}",
             blocks=create_message_blocks(identity, logo, stats),
         )
         return response
@@ -35,7 +35,7 @@ def create_message_identity(provider, audit_info):
     elif provider == "azure":
         printed_subscriptions = []
         for key, value in audit_info.identity.subscriptions.items():
-            intermediate = "- *" + key + ": " + value + "*\n"
+            intermediate = f"- *{key}: {value}*\n"
             printed_subscriptions.append(intermediate)
         identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
         logo = azure_logo
@@ -66,14 +66,14 @@ def create_message_blocks(identity, logo, stats):
             "type": "section",
             "text": {
                 "type": "mrkdwn",
-                "text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass']/stats['findings_count']*100,2)}%)\n",
+                "text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass'] / stats['findings_count'] * 100 , 2)}%)\n",
             },
         },
         {
             "type": "section",
             "text": {
                 "type": "mrkdwn",
-                "text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail']/stats['findings_count']*100,2)}%)\n ",
+                "text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail'] / stats['findings_count'] * 100 , 2)}%)\n ",
             },
         },
         {

@@ -96,8 +96,8 @@ def display_summary_table(
     print("\nOverview Results:")
     overview_table = [
         [
-            f"{Fore.RED}{round(fail_count/len(findings)*100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
-            f"{Fore.GREEN}{round(pass_count/len(findings)*100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
+            f"{Fore.RED}{round(fail_count / len(findings) * 100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
+            f"{Fore.GREEN}{round(pass_count / len(findings) * 100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
         ]
     ]
     print(tabulate(overview_table, tablefmt="rounded_grid"))

@@ -10,7 +10,10 @@ from prowler.config.config import aws_services_json_file
 from prowler.lib.check.check import list_modules, recover_checks_from_service
 from prowler.lib.logger import logger
 from prowler.lib.utils.utils import open_file, parse_json_file
-from prowler.providers.aws.config import AWS_STS_GLOBAL_ENDPOINT_REGION
+from prowler.providers.aws.config import (
+    AWS_STS_GLOBAL_ENDPOINT_REGION,
+    ROLE_SESSION_NAME,
+)
 from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
 from prowler.providers.aws.lib.credentials.credentials import create_sts_session

@@ -30,6 +33,9 @@ class AWS_Provider:
         # If we receive a credentials object filled is coming form an assumed role, so renewal is needed
         if audit_info.credentials:
             logger.info("Creating session for assumed role ...")
+            # FIXME: Boto3 returns the timestamp in UTC and the local TZ could be different so the expiration time could not work as expected
+            # PRWLR-3305
+
             # From botocore we can use RefreshableCredentials class, which has an attribute (refresh_using)
             # that needs to be a method without arguments that retrieves a new set of fresh credentials
             # asuming the role again. -> https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L395
@@ -113,9 +119,15 @@ def assume_role(
     sts_endpoint_region: str = None,
 ) -> dict:
     try:
+        role_session_name = (
+            assumed_role_info.role_session_name
+            if assumed_role_info.role_session_name
+            else ROLE_SESSION_NAME
+        )
+
         assume_role_arguments = {
             "RoleArn": assumed_role_info.role_arn,
-            "RoleSessionName": "ProwlerAsessmentSession",
+            "RoleSessionName": role_session_name,
             "DurationSeconds": assumed_role_info.session_duration,
         }

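A self-contained sketch of the session-name fallback introduced above; build_assume_role_arguments is an illustrative helper, not Prowler's API:

# ROLE_SESSION_NAME mirrors the new constant in prowler/providers/aws/config.py.
ROLE_SESSION_NAME = "ProwlerAssessmentSession"

def build_assume_role_arguments(role_arn, requested_session_name=None, duration=3600):
    # Fall back to the shared default when no --role-session-name was given.
    session_name = requested_session_name or ROLE_SESSION_NAME
    return {
        "RoleArn": role_arn,
        "RoleSessionName": session_name,
        "DurationSeconds": duration,
    }

args = build_assume_role_arguments("arn:aws:iam::123456789012:role/Audit")
print(args["RoleSessionName"])  # ProwlerAssessmentSession
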
@@ -152,23 +164,31 @@ def input_role_mfa_token_and_code() -> tuple[str]:


 def generate_regional_clients(
-    service: str, audit_info: AWS_Audit_Info, global_service: bool = False
+    service: str,
+    audit_info: AWS_Audit_Info,
 ) -> dict:
     """generate_regional_clients returns a dict with the following format for the given service:

     Example:
         {"eu-west-1": boto3_service_client}
     """
     try:
         regional_clients = {}
         service_regions = get_available_aws_service_regions(service, audit_info)
-        # Check if it is global service to gather only one region
-        if global_service:
-            if service_regions:
-                if audit_info.profile_region in service_regions:
-                    service_regions = [audit_info.profile_region]
-                service_regions = service_regions[:1]
-        for region in service_regions:
+
+        # Get the regions enabled for the account and get the intersection with the service available regions
+        if audit_info.enabled_regions:
+            enabled_regions = service_regions.intersection(audit_info.enabled_regions)
+        else:
+            enabled_regions = service_regions
+
+        for region in enabled_regions:
             regional_client = audit_info.audit_session.client(
                 service, region_name=region, config=audit_info.session_config
             )
             regional_client.region = region
             regional_clients[region] = regional_client

         return regional_clients
     except Exception as error:
         logger.error(
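The new flow intersects the service's advertised regions with the account's enabled regions. A self-contained sketch with made-up region sets:

service_regions = {"us-east-1", "eu-west-1", "ap-south-1"}
enabled_regions = {"us-east-1", "eu-west-1"}  # e.g. opt-in regions disabled

# A falsy enabled_regions (empty set) means "no filter", mirroring the diff.
regions_to_scan = (
    service_regions.intersection(enabled_regions)
    if enabled_regions
    else service_regions
)
print(sorted(regions_to_scan))  # ['eu-west-1', 'us-east-1']
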
@@ -176,6 +196,26 @@ def generate_regional_clients(
     )


+def get_aws_enabled_regions(audit_info: AWS_Audit_Info) -> set:
+    """get_aws_enabled_regions returns a set of enabled AWS regions"""
+
+    # EC2 Client to check enabled regions
+    service = "ec2"
+    default_region = get_default_region(service, audit_info)
+    ec2_client = audit_info.audit_session.client(service, region_name=default_region)
+
+    enabled_regions = set()
+    try:
+        # With AllRegions=False we only get the enabled regions for the account
+        for region in ec2_client.describe_regions(AllRegions=False).get("Regions", []):
+            enabled_regions.add(region.get("RegionName"))
+    except Exception as error:
+        logger.warning(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
+    return enabled_regions
+
+
 def get_aws_available_regions():
     try:
         actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
@@ -216,6 +256,8 @@ def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
             service = "efs"
         elif service == "logs":
             service = "cloudwatch"
+        elif service == "cognito":
+            service = "cognito-idp"
         # Check if Prowler has checks in service
         try:
             list_modules(provider, service)
@@ -267,17 +309,18 @@ def get_regions_from_audit_resources(audit_resources: list) -> set:
     return audited_regions


-def get_available_aws_service_regions(service: str, audit_info: AWS_Audit_Info) -> list:
+def get_available_aws_service_regions(service: str, audit_info: AWS_Audit_Info) -> set:
     # Get json locally
     actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
     with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
         data = parse_json_file(f)
     # Check if it is a subservice
-    json_regions = data["services"][service]["regions"][audit_info.audited_partition]
-    if audit_info.audited_regions:  # Check for input aws audit_info.audited_regions
-        regions = list(
-            set(json_regions).intersection(audit_info.audited_regions)
-        )  # Get common regions between input and json
+    json_regions = set(
+        data["services"][service]["regions"][audit_info.audited_partition]
+    )
+    # Check for input aws audit_info.audited_regions
+    if audit_info.audited_regions:
+        # Get common regions between input and json
+        regions = json_regions.intersection(audit_info.audited_regions)
     else:  # Get all regions from json of the service and partition
         regions = json_regions
     return regions

@@ -1,2 +1,3 @@
 AWS_STS_GLOBAL_ENDPOINT_REGION = "us-east-1"
 BOTO3_USER_AGENT_EXTRA = "APN_1826889"
+ROLE_SESSION_NAME = "ProwlerAssessmentSession"

@@ -135,32 +135,31 @@ def allowlist_findings(


 def is_allowlisted(
-    allowlist: dict, audited_account: str, check: str, region: str, resource: str, tags
+    allowlist: dict,
+    audited_account: str,
+    check: str,
+    finding_region: str,
+    finding_resource: str,
+    finding_tags,
 ):
     try:
-        allowlisted_checks = {}
         # By default is not allowlisted
         is_finding_allowlisted = False
-        # First set account key from allowlist dict
-        if audited_account in allowlist["Accounts"]:
-            allowlisted_checks = allowlist["Accounts"][audited_account]["Checks"]
-        # If there is a *, it affects to all accounts
-        # This cannot be elif since in the case of * and single accounts we
-        # want to merge allowlisted checks from * to the other accounts check list
-        if "*" in allowlist["Accounts"]:
-            checks_multi_account = allowlist["Accounts"]["*"]["Checks"]
-            allowlisted_checks.update(checks_multi_account)
-        # Test if it is allowlisted
-        if is_allowlisted_in_check(
-            allowlisted_checks,
-            audited_account,
-            audited_account,
-            check,
-            region,
-            resource,
-            tags,
-        ):
-            is_finding_allowlisted = True

+        # We always check all the accounts present in the allowlist
+        # if one allowlists the finding we set the finding as allowlisted
+        for account in allowlist["Accounts"]:
+            if account == audited_account or account == "*":
+                if is_allowlisted_in_check(
+                    allowlist["Accounts"][account]["Checks"],
+                    audited_account,
+                    check,
+                    finding_region,
+                    finding_resource,
+                    finding_tags,
+                ):
+                    is_finding_allowlisted = True
+                    break

         return is_finding_allowlisted
     except Exception as error:
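For orientation, the allowlist dictionary these functions walk has roughly this shape (a made-up example in the spirit of prowler/config/aws_allowlist.yaml):

# Made-up allowlist with the structure the refactored functions iterate:
# every account entry (including "*") is now visited by is_allowlisted.
allowlist = {
    "Accounts": {
        "*": {  # applies to every audited account
            "Checks": {
                "s3_bucket_public_access": {
                    "Regions": ["*"],
                    "Resources": ["test-bucket-.*"],
                }
            }
        },
        "123456789012": {
            "Checks": {
                "iam_user_mfa_enabled_console_access": {
                    "Regions": ["us-east-1"],
                    "Resources": ["*"],
                }
            }
        },
    }
}
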
@@ -171,43 +170,69 @@ def is_allowlisted(


 def is_allowlisted_in_check(
-    allowlisted_checks, audited_account, account, check, region, resource, tags
+    allowlisted_checks,
+    audited_account,
+    check,
+    finding_region,
+    finding_resource,
+    finding_tags,
 ):
     try:
         # Default value is not allowlisted
         is_check_allowlisted = False

         for allowlisted_check, allowlisted_check_info in allowlisted_checks.items():
             # map lambda to awslambda
             allowlisted_check = re.sub("^lambda", "awslambda", allowlisted_check)
-            # extract the exceptions
+
+            # Check if the finding is excepted
             exceptions = allowlisted_check_info.get("Exceptions")
-            # Check if there are exceptions
             if is_excepted(
                 exceptions,
                 audited_account,
-                region,
-                resource,
-                tags,
+                finding_region,
+                finding_resource,
+                finding_tags,
             ):
                 # Break loop and return default value since is excepted
                 break

             allowlisted_regions = allowlisted_check_info.get("Regions")
             allowlisted_resources = allowlisted_check_info.get("Resources")
-            allowlisted_tags = allowlisted_check_info.get("Tags")
+            allowlisted_tags = allowlisted_check_info.get("Tags", "*")
+            # We need to set the allowlisted_tags if None, "" or [], so the falsy helps
+            if not allowlisted_tags:
+                allowlisted_tags = "*"

             # If there is a *, it affects to all checks
             if (
                 "*" == allowlisted_check
                 or check == allowlisted_check
                 or re.search(allowlisted_check, check)
             ):
-                if is_allowlisted_in_region(
-                    allowlisted_regions,
-                    allowlisted_resources,
-                    allowlisted_tags,
-                    region,
-                    resource,
-                    tags,
+                allowlisted_in_check = True
+                allowlisted_in_region = is_allowlisted_in_region(
+                    allowlisted_regions, finding_region
+                )
+                allowlisted_in_resource = is_allowlisted_in_resource(
+                    allowlisted_resources, finding_resource
+                )
+                allowlisted_in_tags = is_allowlisted_in_tags(
+                    allowlisted_tags, finding_tags
+                )
+
+                # For a finding to be allowlisted requires the following set to True:
+                # - allowlisted_in_check -> True
+                # - allowlisted_in_region -> True
+                # - allowlisted_in_tags -> True
+                # - allowlisted_in_resource -> True
+                # - excepted -> False
+
+                if (
+                    allowlisted_in_check
+                    and allowlisted_in_region
+                    and allowlisted_in_tags
+                    and allowlisted_in_resource
                 ):
                     is_check_allowlisted = True

@@ -220,25 +245,11 @@ def is_allowlisted_in_check(


 def is_allowlisted_in_region(
-    allowlist_regions, allowlist_resources, allowlisted_tags, region, resource, tags
+    allowlisted_regions,
+    finding_region,
 ):
     try:
-        # By default is not allowlisted
-        is_region_allowlisted = False
-        # If there is a *, it affects to all regions
-        if "*" in allowlist_regions or region in allowlist_regions:
-            for elem in allowlist_resources:
-                if is_allowlisted_in_tags(
-                    allowlisted_tags,
-                    elem,
-                    resource,
-                    tags,
-                ):
-                    is_region_allowlisted = True
-                    # if we find the element there is no point in continuing with the loop
-                    break
-
-        return is_region_allowlisted
+        return __is_item_matched__(allowlisted_regions, finding_region)
     except Exception as error:
         logger.critical(
             f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
@@ -246,25 +257,9 @@ def is_allowlisted_in_region(
         sys.exit(1)


-def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags):
+def is_allowlisted_in_tags(allowlisted_tags, finding_tags):
     try:
-        # By default is not allowlisted
-        is_tag_allowlisted = False
-        # Check if it is an *
-        if elem == "*":
-            elem = ".*"
-        # Check if there are allowlisted tags
-        if allowlisted_tags:
-            for allowlisted_tag in allowlisted_tags:
-                if re.search(allowlisted_tag, tags):
-                    is_tag_allowlisted = True
-                    break
-
-        else:
-            if re.search(elem, resource):
-                is_tag_allowlisted = True
-
-        return is_tag_allowlisted
+        return __is_item_matched__(allowlisted_tags, finding_tags)
     except Exception as error:
         logger.critical(
             f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
@@ -272,7 +267,25 @@ def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags):
         sys.exit(1)


-def is_excepted(exceptions, audited_account, region, resource, tags):
+def is_allowlisted_in_resource(allowlisted_resources, finding_resource):
+    try:
+        return __is_item_matched__(allowlisted_resources, finding_resource)
+
+    except Exception as error:
+        logger.critical(
+            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
+        )
+        sys.exit(1)
+
+
+def is_excepted(
+    exceptions,
+    audited_account,
+    finding_region,
+    finding_resource,
+    finding_tags,
+):
+    """is_excepted returns True if the account, region, resource and tags are excepted"""
     try:
         excepted = False
         is_account_excepted = False
@@ -281,39 +294,57 @@ def is_excepted(exceptions, audited_account, region, resource, tags):
         is_tag_excepted = False
         if exceptions:
             excepted_accounts = exceptions.get("Accounts", [])
+            is_account_excepted = __is_item_matched__(
+                excepted_accounts, audited_account
+            )
+
             excepted_regions = exceptions.get("Regions", [])
+            is_region_excepted = __is_item_matched__(excepted_regions, finding_region)
+
             excepted_resources = exceptions.get("Resources", [])
+            is_resource_excepted = __is_item_matched__(
+                excepted_resources, finding_resource
+            )
+
             excepted_tags = exceptions.get("Tags", [])
-        if exceptions:
-            if audited_account in excepted_accounts:
-                is_account_excepted = True
-            if region in excepted_regions:
-                is_region_excepted = True
-            for excepted_resource in excepted_resources:
-                if re.search(excepted_resource, resource):
-                    is_resource_excepted = True
-            for tag in excepted_tags:
-                if tag in tags:
-                    is_tag_excepted = True
-            if (
-                (
-                    (excepted_accounts and is_account_excepted)
-                    or not excepted_accounts
-                )
-                and (
-                    (excepted_regions and is_region_excepted)
-                    or not excepted_regions
-                )
-                and (
-                    (excepted_resources and is_resource_excepted)
-                    or not excepted_resources
-                )
-                and ((excepted_tags and is_tag_excepted) or not excepted_tags)
-            ):
-                excepted = True
+            is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)
+
+            if (
+                not is_account_excepted
+                and not is_region_excepted
+                and not is_resource_excepted
+                and not is_tag_excepted
+            ):
+                excepted = False
+            elif (
+                (is_account_excepted or not excepted_accounts)
+                and (is_region_excepted or not excepted_regions)
+                and (is_resource_excepted or not excepted_resources)
+                and (is_tag_excepted or not excepted_tags)
+            ):
+                excepted = True
         return excepted
     except Exception as error:
         logger.critical(
             f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
         )
         sys.exit(1)

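The rewritten is_excepted treats an empty exception dimension as unconstrained: a finding is excepted only if at least one dimension matched and no non-empty dimension failed to match. A compact standalone model of that rule:

# Compact model of the exception semantics above; dimensions are
# (matched?, constrained?) pairs for account/region/resource/tags.
def excepted(dimensions):
    matched_any = any(matched for matched, _ in dimensions)
    no_failures = all(matched or not constrained for matched, constrained in dimensions)
    return matched_any and no_failures

# Region matches, the other dimensions are unconstrained -> excepted.
print(excepted([(False, False), (True, True), (False, False), (False, False)]))  # True
# Region constrained but not matched -> not excepted.
print(excepted([(True, True), (False, True), (False, False), (False, False)]))   # False
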
+def __is_item_matched__(matched_items, finding_items):
+    """__is_item_matched__ return True if any of the matched_items are present in the finding_items, otherwise returns False."""
+    try:
+        is_item_matched = False
+        if matched_items and (finding_items or finding_items == ""):
+            for item in matched_items:
+                if item == "*":
+                    item = ".*"
+                if re.search(item, finding_items):
+                    is_item_matched = True
+                    break
+        return is_item_matched
+    except Exception as error:
+        logger.critical(
+            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
+        )
+        sys.exit(1)

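A quick standalone check of the new helper's wildcard rule (a bare "*" is rewritten to the regex ".*"); item_matched is a re-implementation for illustration, not an import of Prowler's function:

import re

def item_matched(matched_items, finding_item):
    if not matched_items or (not finding_item and finding_item != ""):
        return False
    for item in matched_items:
        if item == "*":
            item = ".*"  # bare wildcard becomes a match-anything regex
        if re.search(item, finding_item):
            return True
    return False

print(item_matched(["*"], "eu-west-1"))                      # True
print(item_matched(["test-bucket-.*"], "test-bucket-123"))   # True
print(item_matched(["us-east-1"], "eu-west-1"))              # False
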
@@ -1,6 +1,8 @@
 from argparse import ArgumentTypeError, Namespace
+from re import fullmatch, search

 from prowler.providers.aws.aws_provider import get_aws_available_regions
+from prowler.providers.aws.config import ROLE_SESSION_NAME
 from prowler.providers.aws.lib.arn.arn import arn_type


@@ -26,6 +28,13 @@ def init_parser(self):
             help="ARN of the role to be assumed",
             # Pending ARN validation
         )
+        aws_auth_subparser.add_argument(
+            "--role-session-name",
+            nargs="?",
+            default=ROLE_SESSION_NAME,
+            help="An identifier for the assumed role session. Defaults to ProwlerAssessmentSession",
+            type=validate_role_session_name,
+        )
         aws_auth_subparser.add_argument(
             "--sts-endpoint-region",
             nargs="?",
@@ -84,6 +93,11 @@ def init_parser(self):
             action="store_true",
             help="Skip updating previous findings of Prowler in Security Hub",
         )
+        aws_security_hub_subparser.add_argument(
+            "--send-sh-only-fails",
+            action="store_true",
+            help="Send only Prowler failed findings to SecurityHub",
+        )
         # AWS Quick Inventory
         aws_quick_inventory_subparser = aws_parser.add_argument_group("Quick Inventory")
         aws_quick_inventory_subparser.add_argument(
@@ -99,6 +113,7 @@ def init_parser(self):
             "-B",
             "--output-bucket",
             nargs="?",
+            type=validate_bucket,
             default=None,
             help="Custom output bucket, requires -M <mode> and it can work also with -o flag.",
         )
@@ -106,6 +121,7 @@ def init_parser(self):
             "-D",
             "--output-bucket-no-assume",
             nargs="?",
+            type=validate_bucket,
             default=None,
             help="Same as -B but do not use the assumed role credentials to put objects to the bucket, instead uses the initial credentials.",
         )
@@ -126,6 +142,7 @@ def init_parser(self):
             default=None,
             help="Path for allowlist yaml file. See example prowler/config/aws_allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
         )
+
         # Based Scans
         aws_based_scans_subparser = aws_parser.add_argument_group("AWS Based Scans")
         aws_based_scans_parser = aws_based_scans_subparser.add_mutually_exclusive_group()
@@ -178,9 +195,37 @@ def validate_arguments(arguments: Namespace) -> tuple[bool, str]:

     # Handle if session_duration is not the default value or external_id is set
     if (
-        arguments.session_duration and arguments.session_duration != 3600
-    ) or arguments.external_id:
+        (arguments.session_duration and arguments.session_duration != 3600)
+        or arguments.external_id
+        or arguments.role_session_name != ROLE_SESSION_NAME
+    ):
         if not arguments.role:
-            return (False, "To use -I/-T options -R option is needed")
+            return (
+                False,
+                "To use -I/--external-id, -T/--session-duration or --role-session-name options -R/--role option is needed",
+            )

     return (True, "")


+def validate_bucket(bucket_name):
+    """validate_bucket validates that the input bucket_name is valid"""
+    if search("(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
+        return bucket_name
+    else:
+        raise ArgumentTypeError(
+            "Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
+        )
+
+
+def validate_role_session_name(session_name):
+    """
+    validates that the role session name is valid
+    Documentation: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
+    """
+    if fullmatch("[\w+=,.@-]{2,64}", session_name):
+        return session_name
+    else:
+        raise ArgumentTypeError(
+            "Role Session Name must be 2-64 characters long and consist only of upper- and lower-case alphanumeric characters with no spaces. You can also include underscores or any of the following characters: =,.@-"
+        )

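Both validators raise ArgumentTypeError so argparse reports a clean usage error. A quick check of the session-name rule in isolation:

import re

# Same pattern as validate_role_session_name above (STS AssumeRole rules).
PATTERN = r"[\w+=,.@-]{2,64}"

for candidate in ("ProwlerAssessmentSession", "my session", "a"):
    ok = re.fullmatch(PATTERN, candidate) is not None
    print(f"{candidate!r}: {'valid' if ok else 'rejected'}")
# 'ProwlerAssessmentSession': valid
# 'my session': rejected (space not allowed)
# 'a': rejected (too short)
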
@@ -30,6 +30,7 @@ current_audit_info = AWS_Audit_Info(
         session_duration=None,
         external_id=None,
         mfa_enabled=None,
+        role_session_name=None,
     ),
     mfa_enabled=None,
     audit_resources=None,
@@ -38,4 +39,5 @@ current_audit_info = AWS_Audit_Info(
     audit_metadata=None,
     audit_config=None,
     ignore_unused_services=False,
+    enabled_regions=set(),
 )

@@ -1,4 +1,4 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from datetime import datetime
 from typing import Any, Optional

@@ -20,6 +20,7 @@ class AWS_Assume_Role:
     session_duration: int
     external_id: str
     mfa_enabled: bool
+    role_session_name: str


 @dataclass
@@ -53,3 +54,4 @@ class AWS_Audit_Info:
     audit_metadata: Optional[Any] = None
     audit_config: Optional[dict] = None
     ignore_unused_services: bool = False
+    enabled_regions: set = field(default_factory=set)

@@ -1,40 +1,61 @@
 import sys

-from boto3 import client
+from boto3 import client, session

 from prowler.lib.logger import logger
 from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info


 def get_organizations_metadata(
-    metadata_account: str, assumed_credentials: dict
-) -> AWS_Organizations_Info:
+    aws_account_id: str,
+    assumed_credentials: dict = None,
+    session: session = None,
+) -> tuple[dict, dict]:
     try:
-        organizations_client = client(
-            "organizations",
-            aws_access_key_id=assumed_credentials["Credentials"]["AccessKeyId"],
-            aws_secret_access_key=assumed_credentials["Credentials"]["SecretAccessKey"],
-            aws_session_token=assumed_credentials["Credentials"]["SessionToken"],
-        )
+        if assumed_credentials:
+            organizations_client = client(
+                "organizations",
+                aws_access_key_id=assumed_credentials["Credentials"]["AccessKeyId"],
+                aws_secret_access_key=assumed_credentials["Credentials"][
+                    "SecretAccessKey"
+                ],
+                aws_session_token=assumed_credentials["Credentials"]["SessionToken"],
+            )
+        if session:
+            organizations_client = session.client("organizations")
+        else:
+            organizations_client = client("organizations")

         organizations_metadata = organizations_client.describe_account(
-            AccountId=metadata_account
+            AccountId=aws_account_id
         )
         list_tags_for_resource = organizations_client.list_tags_for_resource(
-            ResourceId=metadata_account
+            ResourceId=aws_account_id
         )

+        return organizations_metadata, list_tags_for_resource
     except Exception as error:
-        logger.critical(f"{error.__class__.__name__} -- {error}")
-        sys.exit(1)
-    else:
+        logger.warning(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )
+        return {}, {}
+
+
+def parse_organizations_metadata(metadata: dict, tags: dict) -> AWS_Organizations_Info:
+    try:
         # Convert Tags dictionary to String
         account_details_tags = ""
-        for tag in list_tags_for_resource["Tags"]:
+        for tag in tags.get("Tags", {}):
             account_details_tags += tag["Key"] + ":" + tag["Value"] + ","

+        account_details = metadata.get("Account", {})
         organizations_info = AWS_Organizations_Info(
-            account_details_email=organizations_metadata["Account"]["Email"],
-            account_details_name=organizations_metadata["Account"]["Name"],
-            account_details_arn=organizations_metadata["Account"]["Arn"],
-            account_details_org=organizations_metadata["Account"]["Arn"].split("/")[1],
-            account_details_tags=account_details_tags,
+            account_details_email=account_details.get("Email", ""),
+            account_details_name=account_details.get("Name", ""),
+            account_details_arn=account_details.get("Arn", ""),
+            account_details_org=account_details.get("Arn", "").split("/")[1],
+            account_details_tags=account_details_tags.rstrip(","),
         )
         return organizations_info
+    except Exception as error:
+        logger.warning(
+            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+        )

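Splitting fetch (get_organizations_metadata) from parse (parse_organizations_metadata) makes the parsing testable without AWS calls. A hypothetical caller, with dicts imitating only the shape of the Organizations API responses:

# Hypothetical input shapes, not real API output.
metadata = {
    "Account": {
        "Email": "security@example.com",
        "Name": "audit-account",
        "Arn": "arn:aws:organizations::123456789012:account/o-exampleorgid/111122223333",
    }
}
tags = {"Tags": [{"Key": "env", "Value": "prod"}]}

# The org id is the second path segment of the account ARN.
org_id = metadata["Account"]["Arn"].split("/")[1]
flat_tags = ",".join(f"{t['Key']}:{t['Value']}" for t in tags["Tags"])
print(org_id)     # o-exampleorgid
print(flat_tags)  # env:prod
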
@@ -1,8 +1,11 @@
-def is_account_only_allowed_in_condition(
-    condition_statement: dict, source_account: str
+def is_condition_block_restrictive(
+    condition_statement: dict, source_account: str, is_cross_account_allowed=False
 ):
     """
-    is_account_only_allowed_in_condition parses the IAM Condition policy block and returns True if the source_account passed as argument is within, False if not.
+    is_condition_block_restrictive parses the IAM Condition policy block and, by default, returns True if the source_account passed as argument is within, False if not.
+
+    If argument is_cross_account_allowed is True it tests if the Condition block includes any of the operators allowlisted returning True if does, False if not.
+

     @param condition_statement: dict with an IAM Condition block, e.g.:
         {
@@ -54,23 +57,32 @@ def is_account_only_allowed_in_condition(
                 condition_statement[condition_operator][value],
                 list,
             ):
-                # if there is an arn/account without the source account -> we do not consider it safe
-                # here by default we assume is true and look for false entries
-                is_condition_valid = True
-                for item in condition_statement[condition_operator][value]:
-                    if source_account not in item:
-                        is_condition_valid = False
-                        break
+                is_condition_key_restrictive = True
+                # if cross account is not allowed check for each condition block looking for accounts
+                # different than default
+                if not is_cross_account_allowed:
+                    # if there is an arn/account without the source account -> we do not consider it safe
+                    # here by default we assume is true and look for false entries
+                    for item in condition_statement[condition_operator][value]:
+                        if source_account not in item:
+                            is_condition_key_restrictive = False
+                            break
+
+                if is_condition_key_restrictive:
+                    is_condition_valid = True

             # value is a string
             elif isinstance(
                 condition_statement[condition_operator][value],
                 str,
             ):
-                if (
-                    source_account
-                    in condition_statement[condition_operator][value]
-                ):
+                if is_cross_account_allowed:
                     is_condition_valid = True
+                else:
+                    if (
+                        source_account
+                        in condition_statement[condition_operator][value]
+                    ):
+                        is_condition_valid = True

     return is_condition_valid

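A standalone re-implementation of the list-valued branch above, for intuition: with cross-account disallowed, every value pinned by the condition operator must contain the audited account:

# Sketch only; not Prowler's function, just the same rule in isolation.
def condition_values_restrictive(values, source_account, is_cross_account_allowed=False):
    if is_cross_account_allowed:
        return True  # any principal pinned by the operator is accepted
    return all(source_account in item for item in values)

values = ["arn:aws:iam::123456789012:root", "123456789012"]
print(condition_values_restrictive(values, "123456789012"))                           # True
print(condition_values_restrictive(values + ["999988887777"], "123456789012"))        # False
print(condition_values_restrictive(values + ["999988887777"], "123456789012", True))  # True
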
@@ -211,9 +211,13 @@ def create_inventory_table(resources: list, resources_in_region: dict) -> dict:

 def create_output(resources: list, audit_info: AWS_Audit_Info, args):
     json_output = []
-    output_file = (
-        f"prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"
-    )
+    # Check if custom output filename was input, if not, set the default
+    if not hasattr(args, "output_filename") or args.output_filename is None:
+        output_file = (
+            f"prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"
+        )
+    else:
+        output_file = args.output_filename

     for item in sorted(resources, key=lambda d: d["arn"]):
         resource = {}
@@ -275,8 +279,8 @@ def create_output(resources: list, audit_info: AWS_Audit_Info, args):
         f"\n{Fore.YELLOW}WARNING: Only resources that have or have had tags will appear (except for IAM and S3).\nSee more in https://docs.prowler.cloud/en/latest/tutorials/quick-inventory/#objections{Style.RESET_ALL}"
     )
     print("\nMore details in files:")
-    print(f"  - CSV: {args.output_directory}/{output_file+csv_file_suffix}")
-    print(f"  - JSON: {args.output_directory}/{output_file+json_file_suffix}")
+    print(f"  - CSV: {args.output_directory}/{output_file + csv_file_suffix}")
+    print(f"  - JSON: {args.output_directory}/{output_file + json_file_suffix}")

     # Send output to S3 if needed (-B / -D)
     for mode in ["json", "csv"]:

@@ -1,5 +1,3 @@
-import sys
-
 from prowler.config.config import (
     csv_file_suffix,
     html_file_suffix,
@@ -29,7 +27,7 @@ def send_to_s3_bucket(
         else:  # Compliance output mode
             filename = f"{output_filename}_{output_mode}{csv_file_suffix}"

-        logger.info(f"Sending outputs to S3 bucket {output_bucket_name}")
+        logger.info(f"Sending output file {filename} to S3 bucket {output_bucket_name}")
         # File location
         file_name = output_directory + "/" + filename

@@ -41,10 +39,9 @@ def send_to_s3_bucket(
         s3_client.upload_file(file_name, output_bucket_name, object_name)

     except Exception as error:
-        logger.critical(
+        logger.error(
             f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
         )
-        sys.exit(1)


 def get_s3_object_path(output_directory: str) -> str:
