Compare commits
123 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| bdae47d61b | |||
| e3a7d89948 | |||
| c7d7ec9a3b | |||
| 155a1813cc | |||
| 71e444d4ae | |||
| 42b7f0f1a9 | |||
| 5b3f0fbd7f | |||
| 06eb69e455 | |||
| 338a11eaaf | |||
| 8814a0710a | |||
| 76a55cdb54 | |||
| 736badb284 | |||
| 37f77bb778 | |||
| 7e5e48c588 | |||
| 5f0017046f | |||
| 612d867838 | |||
| 8c2668ebe4 | |||
| be4b1bd99b | |||
| 502525eff1 | |||
| 09b5afe9c3 | |||
| 9a4fc784db | |||
| 04177db648 | |||
| 2408dbf855 | |||
| 9c4a8782e4 | |||
| 0d549ea39e | |||
| 0060081cad | |||
| 0c2d06dd9a | |||
| 14b9be4c47 | |||
| 6bac5650e6 | |||
| 6170462a61 | |||
| 2ad5926b13 | |||
| a6ddc85e4c | |||
| aceff35f29 | |||
| 3ae96c3aa6 | |||
| 0dcaaa9083 | |||
| 323a7f0349 | |||
| 736cbea862 | |||
| d3e290978e | |||
| 9c91cfcb7d | |||
| e279f7fcfd | |||
| a555cffebe | |||
| 49f5435392 | |||
| a087dd9b85 | |||
| 6e89c301b2 | |||
| d5dac448a6 | |||
| 00e6eb35f1 | |||
| cdb455b2b1 | |||
| 837c65ba23 | |||
| 035293b612 | |||
| 250b5df836 | |||
| ec59dbc6ee | |||
| 4d5676f00e | |||
| 2a4b62527a | |||
| ec0341c696 | |||
| 2e5f3a5a66 | |||
| 231a5fab86 | |||
| 10319ea69d | |||
| 53bb5aff22 | |||
| 52a5fff61f | |||
| f28754b883 | |||
| 6fce797ca2 | |||
| a1fd315104 | |||
| a91f0ac8b5 | |||
| 2c96df05f4 | |||
| b57788c7b9 | |||
| 7431bab2a7 | |||
| a52697bfdf | |||
| 9dc2199381 | |||
| 89db760b89 | |||
| 4356c1e186 | |||
| e32cebc553 | |||
| 23e1cc281d | |||
| 48d3fb4fe3 | |||
| ab727e6816 | |||
| 23d882d7ab | |||
| 59435167ea | |||
| 77cdd793f8 | |||
| d13f3f0e0c | |||
| 56821de2f4 | |||
| 92190fa69f | |||
| 85db7c5183 | |||
| a55ac266bf | |||
| 90622e0437 | |||
| 81596250dc | |||
| 43db5fe527 | |||
| dfb479fa80 | |||
| aa88b453ff | |||
| fbda66c6d1 | |||
| 2200e65519 | |||
| b8537aa22d | |||
| cb4a5dec79 | |||
| 0286de7ce2 | |||
| b00602f109 | |||
| 1cfae546a0 | |||
| 05dae4e8d1 | |||
| 52ddaca4c5 | |||
| 940a1202b3 | |||
| ec27451199 | |||
| 60e06dcc6e | |||
| 7733aab088 | |||
| 5c6fadcfe7 | |||
| 1bdb314e2c | |||
| 5b0365947f | |||
| b512f6c421 | |||
| c4a8771647 | |||
| 6f967c6da7 | |||
| 82cd29d595 | |||
| 14c2334e1b | |||
| 3598514cb4 | |||
| c4ba061f30 | |||
| f4530b21d2 | |||
| 3949ab736d | |||
| 9da5066b18 | |||
| 941539616c | |||
| 135fa044b7 | |||
| 48913c1886 | |||
| ea20943f83 | |||
| 2738cfd1bd | |||
| 265c3d818e | |||
| c0a9fdf8c8 | |||
| 8b3335f426 | |||
| 252033d113 | |||
| 0bc00dbca4 |
@@ -85,44 +85,9 @@ DJANGO_CACHE_MAX_AGE=3600
|
||||
DJANGO_STALE_WHILE_REVALIDATE=60
|
||||
DJANGO_MANAGE_DB_PARTITIONS=True
|
||||
# openssl genrsa -out private.pem 2048
|
||||
DJANGO_TOKEN_SIGNING_KEY="-----BEGIN PRIVATE KEY-----
|
||||
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDs4e+kt7SnUJek
|
||||
6V5r9zMGzXCoU5qnChfPiqu+BgANyawz+MyVZPs6RCRfeo6tlCknPQtOziyXYM2I
|
||||
7X+qckmuzsjqp8+u+o1mw3VvUuJew5k2SQLPYwsiTzuFNVJEOgRo3hywGiGwS2iv
|
||||
/5nh2QAl7fq2qLqZEXQa5+/xJlQggS1CYxOJgggvLyra50QZlBvPve/AxKJ/EV/Q
|
||||
irWTZU5lLNI8sH2iZR05vQeBsxZ0dCnGMT+vGl+cGkqrvzQzKsYbDmabMcfTYhYi
|
||||
78fpv6A4uharJFHayypYBjE39PwhMyyeycrNXlpm1jpq+03HgmDuDMHydk1tNwuT
|
||||
nEC7m7iNAgMBAAECggEAA2m48nJcJbn9SVi8bclMwKkWmbJErOnyEGEy2sTK3Of+
|
||||
NWx9BB0FmqAPNxn0ss8K7cANKOhDD7ZLF9E2MO4/HgfoMKtUzHRbM7MWvtEepldi
|
||||
nnvcUMEgULD8Dk4HnqiIVjt3BdmGiTv46OpBnRWrkSBV56pUL+7msZmMZTjUZvh2
|
||||
ZWv0+I3gtDIjo2Zo/FiwDV7CfwRjJarRpYUj/0YyuSA4FuOUYl41WAX1I301FKMH
|
||||
xo3jiAYi1s7IneJ16OtPpOA34Wg5F6ebm/UO0uNe+iD4kCXKaZmxYQPh5tfB0Qa3
|
||||
qj1T7GNpFNyvtG7VVdauhkb8iu8X/wl6PCwbg0RCKQKBgQD9HfpnpH0lDlHMRw9K
|
||||
X7Vby/1fSYy1BQtlXFEIPTN/btJ/asGxLmAVwJ2HAPXWlrfSjVAH7CtVmzN7v8oj
|
||||
HeIHfeSgoWEu1syvnv2AMaYSo03UjFFlfc/GUxF7DUScRIhcJUPCP8jkAROz9nFv
|
||||
DByNjUL17Q9r43DmDiRsy0IFqQKBgQDvlJ9Uhl+Sp7gRgKYwa/IG0+I4AduAM+Gz
|
||||
Dxbm52QrMGMTjaJFLmLHBUZ/ot+pge7tZZGws8YR8ufpyMJbMqPjxhIvRRa/p1Tf
|
||||
E3TQPW93FMsHUvxAgY3MV5MzXFPhlNAKb+akP/RcXUhetGAuZKLubtDCWa55ZQuL
|
||||
wj2OS+niRQKBgE7K8zUqNi6/22S8xhy/2GPgB1qPObbsABUofK0U6CAGLo6te+gc
|
||||
6Jo84IyzFtQbDNQFW2Fr+j1m18rw9AqkdcUhQndiZS9AfG07D+zFB86LeWHt4DS4
|
||||
ymIRX8Kvaak/iDcu/n3Mf0vCrhB6aetImObTj4GgrwlFvtJOmrYnO8EpAoGAIXXP
|
||||
Xt25gWD9OyyNiVu6HKwA/zN7NYeJcRmdaDhO7B1A6R0x2Zml4AfjlbXoqOLlvLAf
|
||||
zd79vcoAC82nH1eOPiSOq51plPDI0LMF8IN0CtyTkn1Lj7LIXA6rF1RAvtOqzppc
|
||||
SvpHpZK9pcRpXnFdtBE0BMDDtl6fYzCIqlP94UUCgYEAnhXbAQMF7LQifEm34Dx8
|
||||
BizRMOKcqJGPvbO2+Iyt50O5X6onU2ITzSV1QHtOvAazu+B1aG9pEuBFDQ+ASxEu
|
||||
L9ruJElkOkb/o45TSF6KCsHd55ReTZ8AqnRjf5R+lyzPqTZCXXb8KTcRvWT4zQa3
|
||||
VxyT2PnaSqEcexWUy4+UXoQ=
|
||||
-----END PRIVATE KEY-----"
|
||||
DJANGO_TOKEN_SIGNING_KEY=""
|
||||
# openssl rsa -in private.pem -pubout -out public.pem
|
||||
DJANGO_TOKEN_VERIFYING_KEY="-----BEGIN PUBLIC KEY-----
|
||||
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7OHvpLe0p1CXpOlea/cz
|
||||
Bs1wqFOapwoXz4qrvgYADcmsM/jMlWT7OkQkX3qOrZQpJz0LTs4sl2DNiO1/qnJJ
|
||||
rs7I6qfPrvqNZsN1b1LiXsOZNkkCz2MLIk87hTVSRDoEaN4csBohsEtor/+Z4dkA
|
||||
Je36tqi6mRF0Gufv8SZUIIEtQmMTiYIILy8q2udEGZQbz73vwMSifxFf0Iq1k2VO
|
||||
ZSzSPLB9omUdOb0HgbMWdHQpxjE/rxpfnBpKq780MyrGGw5mmzHH02IWIu/H6b+g
|
||||
OLoWqyRR2ssqWAYxN/T8ITMsnsnKzV5aZtY6avtNx4Jg7gzB8nZNbTcLk5xAu5u4
|
||||
jQIDAQAB
|
||||
-----END PUBLIC KEY-----"
|
||||
DJANGO_TOKEN_VERIFYING_KEY=""
|
||||
# openssl rand -base64 32
|
||||
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
|
||||
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
|
||||
@@ -134,7 +99,7 @@ SENTRY_ENVIRONMENT=local
|
||||
SENTRY_RELEASE=local
|
||||
|
||||
#### Prowler release version ####
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.10.0
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.12.2
|
||||
|
||||
# Social login credentials
|
||||
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
|
||||
|
||||
@@ -110,6 +110,10 @@ component/ui:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "ui/**"
|
||||
|
||||
component/mcp-server:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "mcp_server/**"
|
||||
|
||||
compliance:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/compliance/**"
|
||||
@@ -119,3 +123,7 @@ compliance:
|
||||
review-django-migrations:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "api/src/backend/api/migrations/**"
|
||||
|
||||
metadata-review:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "**/*.metadata.json"
|
||||
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
@@ -107,7 +107,7 @@ jobs:
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -44,16 +44,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/api-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
@@ -76,20 +76,20 @@ jobs:
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
.github/workflows/api-pull-request.yml
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
|
||||
- name: Replace @master with current branch in pyproject.toml
|
||||
- name: Replace @master with current branch in pyproject.toml - Only for pull requests to `master`
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'pull_request' && github.base_ref == 'master'
|
||||
run: |
|
||||
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
|
||||
echo "Using branch: $BRANCH_NAME"
|
||||
@@ -102,15 +102,9 @@ jobs:
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Update poetry.lock after the branch name change
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry lock
|
||||
|
||||
- name: Update SDK's poetry.lock resolved_reference to latest commit - Only for push events to `master`
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'push'
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||
run: |
|
||||
# Get the latest commit hash from the prowler-cloud/prowler repository
|
||||
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
|
||||
@@ -125,19 +119,19 @@ jobs:
|
||||
echo "Updated resolved_reference:"
|
||||
grep -A2 -B2 "resolved_reference" poetry.lock
|
||||
|
||||
- name: Update poetry.lock
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry lock
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
|
||||
- name: Install system dependencies for xmlsec
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
@@ -208,7 +202,7 @@ jobs:
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -216,11 +210,11 @@ jobs:
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
|
||||
@@ -18,7 +18,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Find existing documentation comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ env.PR_NUMBER }}
|
||||
|
||||
@@ -7,11 +7,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: TruffleHog OSS
|
||||
uses: trufflesecurity/trufflehog@a05cf0859455b5b16317ee22d809887a4043cdf0 # v3.90.2
|
||||
uses: trufflesecurity/trufflehog@466da5b0bb161144f6afca9afe5d57975828c410 # v3.90.8
|
||||
with:
|
||||
path: ./
|
||||
base: ${{ github.event.repository.default_branch }}
|
||||
|
||||
@@ -14,4 +14,4 @@ jobs:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
||||
|
||||
@@ -9,14 +9,15 @@ on:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
# Leaving this commented until we find a way to run it for forks but in Prowler's context
|
||||
# pull_request_target:
|
||||
# types:
|
||||
# - opened
|
||||
# - synchronize
|
||||
# - reopened
|
||||
# branches:
|
||||
# - "master"
|
||||
# - "v5.*"
|
||||
|
||||
jobs:
|
||||
conflict-checker:
|
||||
@@ -24,14 +25,15 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
**
|
||||
@@ -69,7 +71,7 @@ jobs:
|
||||
|
||||
- name: Add conflict label
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'true'
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
script: |
|
||||
@@ -95,7 +97,7 @@ jobs:
|
||||
|
||||
- name: Remove conflict label
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'false'
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
script: |
|
||||
@@ -117,7 +119,7 @@ jobs:
|
||||
|
||||
- name: Find existing conflict comment
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'true'
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
@@ -147,7 +149,7 @@ jobs:
|
||||
|
||||
- name: Find existing conflict comment when resolved
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'false'
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
id: find-resolved-comment
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
@@ -165,3 +167,9 @@ jobs:
|
||||
✅ **Conflict Markers Resolved**
|
||||
|
||||
All conflict markers have been successfully resolved in this pull request.
|
||||
|
||||
- name: Fail workflow if conflicts detected
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'true'
|
||||
run: |
|
||||
echo "::error::Workflow failed due to conflict markers in files: ${{ steps.conflict-check.outputs.conflict_files }}"
|
||||
exit 1
|
||||
|
||||
@@ -22,13 +22,13 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
@@ -59,25 +59,157 @@ jobs:
|
||||
BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
|
||||
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Calculate UI version (1.X.X format - matches Prowler minor version)
|
||||
UI_VERSION="1.${MINOR_VERSION}.${PATCH_VERSION}"
|
||||
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
|
||||
# Function to extract the latest version from changelog
|
||||
extract_latest_version() {
|
||||
local changelog_file="$1"
|
||||
if [ -f "$changelog_file" ]; then
|
||||
# Extract the first version entry (most recent) from changelog
|
||||
# Format: ## [version] (1.2.3) or ## [vversion] (v1.2.3)
|
||||
local version=$(grep -m 1 '^## \[' "$changelog_file" | sed 's/^## \[\(.*\)\].*/\1/' | sed 's/^v//' | tr -d '[:space:]')
|
||||
echo "$version"
|
||||
else
|
||||
echo ""
|
||||
fi
|
||||
}
|
||||
|
||||
# Calculate API version (1.X.X format - one minor version ahead)
|
||||
API_MINOR_VERSION=$((MINOR_VERSION + 1))
|
||||
API_VERSION="1.${API_MINOR_VERSION}.${PATCH_VERSION}"
|
||||
# Read actual versions from changelogs (source of truth)
|
||||
UI_VERSION=$(extract_latest_version "ui/CHANGELOG.md")
|
||||
API_VERSION=$(extract_latest_version "api/CHANGELOG.md")
|
||||
SDK_VERSION=$(extract_latest_version "prowler/CHANGELOG.md")
|
||||
|
||||
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "SDK_VERSION=${SDK_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
if [ -n "$UI_VERSION" ]; then
|
||||
echo "Read UI version from changelog: $UI_VERSION"
|
||||
else
|
||||
echo "Warning: No UI version found in ui/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$API_VERSION" ]; then
|
||||
echo "Read API version from changelog: $API_VERSION"
|
||||
else
|
||||
echo "Warning: No API version found in api/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$SDK_VERSION" ]; then
|
||||
echo "Read SDK version from changelog: $SDK_VERSION"
|
||||
else
|
||||
echo "Warning: No SDK version found in prowler/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
echo "Prowler version: $PROWLER_VERSION"
|
||||
echo "Branch name: $BRANCH_NAME"
|
||||
echo "UI version: $UI_VERSION"
|
||||
echo "API version: $API_VERSION"
|
||||
echo "SDK version: $SDK_VERSION"
|
||||
echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
|
||||
else
|
||||
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Extract changelog entries
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Function to extract changelog for a specific version
|
||||
extract_changelog() {
|
||||
local file="$1"
|
||||
local version="$2"
|
||||
local output_file="$3"
|
||||
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "Warning: $file not found, skipping..."
|
||||
touch "$output_file"
|
||||
return
|
||||
fi
|
||||
|
||||
# Extract changelog section for this version
|
||||
awk -v version="$version" '
|
||||
/^## \[v?'"$version"'\]/ { found=1; next }
|
||||
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
|
||||
found && !/^## \[v?'"$version"'\]/ { print }
|
||||
' "$file" > "$output_file"
|
||||
|
||||
# Remove --- separators
|
||||
sed -i '/^---$/d' "$output_file"
|
||||
|
||||
# Remove trailing empty lines
|
||||
sed -i '/^$/d' "$output_file"
|
||||
}
|
||||
|
||||
# Calculate expected versions for this release
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
EXPECTED_UI_VERSION="1.${BASH_REMATCH[2]}.${BASH_REMATCH[3]}"
|
||||
EXPECTED_API_VERSION="1.$((${BASH_REMATCH[2]} + 1)).${BASH_REMATCH[3]}"
|
||||
|
||||
echo "Expected UI version for this release: $EXPECTED_UI_VERSION"
|
||||
echo "Expected API version for this release: $EXPECTED_API_VERSION"
|
||||
fi
|
||||
|
||||
# Determine if components have changes for this specific release
|
||||
# UI has changes if its current version matches what we expect for this release
|
||||
if [ -n "$UI_VERSION" ] && [ "$UI_VERSION" = "$EXPECTED_UI_VERSION" ]; then
|
||||
echo "HAS_UI_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ UI changes detected - version matches expected: $UI_VERSION"
|
||||
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
|
||||
else
|
||||
echo "HAS_UI_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No UI changes for this release (current: $UI_VERSION, expected: $EXPECTED_UI_VERSION)"
|
||||
touch "ui_changelog.md"
|
||||
fi
|
||||
|
||||
# API has changes if its current version matches what we expect for this release
|
||||
if [ -n "$API_VERSION" ] && [ "$API_VERSION" = "$EXPECTED_API_VERSION" ]; then
|
||||
echo "HAS_API_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ API changes detected - version matches expected: $API_VERSION"
|
||||
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
|
||||
else
|
||||
echo "HAS_API_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No API changes for this release (current: $API_VERSION, expected: $EXPECTED_API_VERSION)"
|
||||
touch "api_changelog.md"
|
||||
fi
|
||||
|
||||
# SDK has changes if its current version matches the input version
|
||||
if [ -n "$SDK_VERSION" ] && [ "$SDK_VERSION" = "$PROWLER_VERSION" ]; then
|
||||
echo "HAS_SDK_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ SDK changes detected - version matches input: $SDK_VERSION"
|
||||
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
|
||||
else
|
||||
echo "HAS_SDK_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No SDK changes for this release (current: $SDK_VERSION, input: $PROWLER_VERSION)"
|
||||
touch "prowler_changelog.md"
|
||||
fi
|
||||
|
||||
# Combine changelogs in order: UI, API, SDK
|
||||
> combined_changelog.md
|
||||
|
||||
if [ "$HAS_UI_CHANGES" = "true" ] && [ -s "ui_changelog.md" ]; then
|
||||
echo "## UI" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat ui_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_API_CHANGES" = "true" ] && [ -s "api_changelog.md" ]; then
|
||||
echo "## API" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat api_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_SDK_CHANGES" = "true" ] && [ -s "prowler_changelog.md" ]; then
|
||||
echo "## SDK" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat prowler_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
echo "Combined changelog preview:"
|
||||
cat combined_changelog.md
|
||||
|
||||
- name: Checkout existing branch for patch release
|
||||
if: ${{ env.PATCH_VERSION != '0' }}
|
||||
run: |
|
||||
@@ -114,6 +246,7 @@ jobs:
|
||||
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
|
||||
|
||||
- name: Verify version in api/pyproject.toml
|
||||
if: ${{ env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
@@ -124,7 +257,7 @@ jobs:
|
||||
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
|
||||
|
||||
- name: Verify prowler dependency in api/pyproject.toml
|
||||
if: ${{ env.PATCH_VERSION != '0' }}
|
||||
if: ${{ env.PATCH_VERSION != '0' && env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
@@ -135,6 +268,7 @@ jobs:
|
||||
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
|
||||
|
||||
- name: Verify version in api/src/backend/api/v1/views.py
|
||||
if: ${{ env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
@@ -162,12 +296,11 @@ jobs:
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
|
||||
# Create a temporary branch for the PR
|
||||
# Create a temporary branch for the PR from the minor version branch
|
||||
TEMP_BRANCH="update-api-dependency-$BRANCH_NAME_TRIMMED-$(date +%s)"
|
||||
echo "TEMP_BRANCH=$TEMP_BRANCH" >> $GITHUB_ENV
|
||||
|
||||
# Switch back to master and create temp branch
|
||||
git checkout master
|
||||
# Create temp branch from the current minor version branch
|
||||
git checkout -b "$TEMP_BRANCH"
|
||||
|
||||
# Minor release: update the dependency to use the release branch
|
||||
@@ -221,77 +354,14 @@ jobs:
|
||||
component/api
|
||||
no-changelog
|
||||
|
||||
- name: Extract changelog entries
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Function to extract changelog for a specific version
|
||||
extract_changelog() {
|
||||
local file="$1"
|
||||
local version="$2"
|
||||
local output_file="$3"
|
||||
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "Warning: $file not found, skipping..."
|
||||
touch "$output_file"
|
||||
return
|
||||
fi
|
||||
|
||||
# Extract changelog section for this version
|
||||
awk -v version="$version" '
|
||||
/^## \[v?'"$version"'\]/ { found=1; next }
|
||||
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
|
||||
found && !/^## \[v?'"$version"'\]/ { print }
|
||||
' "$file" > "$output_file"
|
||||
|
||||
# Remove --- separators
|
||||
sed -i '/^---$/d' "$output_file"
|
||||
|
||||
# Remove trailing empty lines
|
||||
sed -i '/^$/d' "$output_file"
|
||||
}
|
||||
|
||||
# Extract changelogs
|
||||
echo "Extracting changelog entries..."
|
||||
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
|
||||
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
|
||||
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
|
||||
|
||||
# Combine changelogs in order: UI, API, SDK
|
||||
> combined_changelog.md
|
||||
|
||||
if [ -s "ui_changelog.md" ]; then
|
||||
echo "## UI" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat ui_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ -s "api_changelog.md" ]; then
|
||||
echo "## API" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat api_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ -s "prowler_changelog.md" ]; then
|
||||
echo "## SDK" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat prowler_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
echo "Combined changelog preview:"
|
||||
cat combined_changelog.md
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
|
||||
uses: softprops/action-gh-release@6cbd405e2c4e67a21c47fa9e383d020e4e28b836 # v2.3.3
|
||||
with:
|
||||
tag_name: ${{ env.PROWLER_VERSION }}
|
||||
name: Prowler ${{ env.PROWLER_VERSION }}
|
||||
body_path: combined_changelog.md
|
||||
draft: true
|
||||
target_commitish: ${{ env.PATCH_VERSION == '0' && 'master' || env.BRANCH_NAME }}
|
||||
target_commitish: ${{ env.BRANCH_NAME }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
|
||||
@@ -13,10 +13,10 @@ jobs:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
env:
|
||||
MONITORED_FOLDERS: "api ui prowler"
|
||||
MONITORED_FOLDERS: "api ui prowler dashboard"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
- name: Find existing changelog comment
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||
id: find_comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad #v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
|
||||
@@ -11,7 +11,7 @@ jobs:
|
||||
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.merge_commit_sha }}
|
||||
|
||||
@@ -22,7 +22,7 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Trigger pull request
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -59,10 +59,10 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
@@ -108,13 +108,13 @@ jobs:
|
||||
esac
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
|
||||
@@ -14,7 +14,7 @@ jobs:
|
||||
name: Bump Version
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Get Prowler version
|
||||
shell: bash
|
||||
|
||||
@@ -52,16 +52,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/sdk-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
@@ -21,11 +21,11 @@ jobs:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
@@ -51,7 +51,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
@@ -104,7 +104,7 @@ jobs:
|
||||
|
||||
- name: Dockerfile - Check if Dockerfile has changed
|
||||
id: dockerfile-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
Dockerfile
|
||||
@@ -117,12 +117,12 @@ jobs:
|
||||
# Test AWS
|
||||
- name: AWS - Check if any file has changed
|
||||
id: aws-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/aws/**
|
||||
./tests/providers/aws/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: AWS - Test
|
||||
if: steps.aws-changed-files.outputs.any_changed == 'true'
|
||||
@@ -132,12 +132,12 @@ jobs:
|
||||
# Test Azure
|
||||
- name: Azure - Check if any file has changed
|
||||
id: azure-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/azure/**
|
||||
./tests/providers/azure/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: Azure - Test
|
||||
if: steps.azure-changed-files.outputs.any_changed == 'true'
|
||||
@@ -147,12 +147,12 @@ jobs:
|
||||
# Test GCP
|
||||
- name: GCP - Check if any file has changed
|
||||
id: gcp-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/gcp/**
|
||||
./tests/providers/gcp/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: GCP - Test
|
||||
if: steps.gcp-changed-files.outputs.any_changed == 'true'
|
||||
@@ -162,12 +162,12 @@ jobs:
|
||||
# Test Kubernetes
|
||||
- name: Kubernetes - Check if any file has changed
|
||||
id: kubernetes-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/kubernetes/**
|
||||
./tests/providers/kubernetes/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: Kubernetes - Test
|
||||
if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
|
||||
@@ -177,12 +177,12 @@ jobs:
|
||||
# Test GitHub
|
||||
- name: GitHub - Check if any file has changed
|
||||
id: github-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/github/**
|
||||
./tests/providers/github/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: GitHub - Test
|
||||
if: steps.github-changed-files.outputs.any_changed == 'true'
|
||||
@@ -192,12 +192,12 @@ jobs:
|
||||
# Test NHN
|
||||
- name: NHN - Check if any file has changed
|
||||
id: nhn-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/nhn/**
|
||||
./tests/providers/nhn/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: NHN - Test
|
||||
if: steps.nhn-changed-files.outputs.any_changed == 'true'
|
||||
@@ -207,12 +207,12 @@ jobs:
|
||||
# Test M365
|
||||
- name: M365 - Check if any file has changed
|
||||
id: m365-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/m365/**
|
||||
./tests/providers/m365/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: M365 - Test
|
||||
if: steps.m365-changed-files.outputs.any_changed == 'true'
|
||||
@@ -222,12 +222,12 @@ jobs:
|
||||
# Test IaC
|
||||
- name: IaC - Check if any file has changed
|
||||
id: iac-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/iac/**
|
||||
./tests/providers/iac/**
|
||||
.poetry.lock
|
||||
./poetry.lock
|
||||
|
||||
- name: IaC - Test
|
||||
if: steps.iac-changed-files.outputs.any_changed == 'true'
|
||||
@@ -237,7 +237,7 @@ jobs:
|
||||
# Test MongoDB Atlas
|
||||
- name: MongoDB Atlas - Check if any file has changed
|
||||
id: mongodb-atlas-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/mongodbatlas/**
|
||||
@@ -263,7 +263,7 @@ jobs:
|
||||
# Codecov
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
|
||||
@@ -64,14 +64,14 @@ jobs:
|
||||
;;
|
||||
esac
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
# cache: ${{ env.CACHE }}
|
||||
|
||||
@@ -23,12 +23,12 @@ jobs:
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ env.GITHUB_BRANCH }}
|
||||
|
||||
- name: setup python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: 3.9 #install the python needed
|
||||
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
pip install boto3
|
||||
|
||||
- name: Configure AWS Credentials -- DEV
|
||||
uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
|
||||
uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 # v5.0.0
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_DEV }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
@@ -56,7 +56,7 @@ jobs:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
commit-message: "feat(regions_update): Update regions for AWS services"
|
||||
branch: "aws-services-regions-updated-${{ github.sha }}"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws, no-changelog"
|
||||
title: "chore(regions_update): Changes in regions for AWS services"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -44,16 +44,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/ui-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
@@ -20,7 +20,9 @@ jobs:
|
||||
NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Fix API data directory permissions
|
||||
run: docker run --rm -v $(pwd)/_data/api:/data alpine chown -R 1000:1000 /data
|
||||
- name: Start API services
|
||||
run: |
|
||||
# Override docker-compose image tag to use latest instead of stable
|
||||
@@ -57,7 +59,7 @@ jobs:
|
||||
echo "All database fixtures loaded successfully!"
|
||||
'
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
@@ -69,7 +71,7 @@ jobs:
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
|
||||
@@ -27,11 +27,11 @@ jobs:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
- name: Build Container
|
||||
|
||||
@@ -63,6 +63,7 @@ junit-reports/
|
||||
# .env
|
||||
ui/.env*
|
||||
api/.env*
|
||||
mcp_server/.env*
|
||||
.env.local
|
||||
|
||||
# Coverage
|
||||
@@ -78,3 +79,7 @@ _data/
|
||||
|
||||
# Claude
|
||||
CLAUDE.md
|
||||
|
||||
# MCP Server
|
||||
mcp_server/prowler_mcp_server/prowler_app/server.py
|
||||
mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml
|
||||
|
||||
@@ -6,6 +6,7 @@ repos:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
args: ["--unsafe"]
|
||||
exclude: prowler/config/llm_config.yaml
|
||||
- id: check-json
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
|
||||
@@ -0,0 +1,110 @@
|
||||
# Repository Guidelines
|
||||
|
||||
## How to Use This Guide
|
||||
|
||||
- Start here for cross-project norms, Prowler is a monorepo with several components. Every component should have an `AGENTS.md` file that contains the guidelines for the agents in that component. The file is located beside the code you are touching (e.g. `api/AGENTS.md`, `ui/AGENTS.md`, `prowler/AGENTS.md`).
|
||||
- Follow the stricter rule when guidance conflicts; component docs override this file for their scope.
|
||||
- Keep instructions synchronized. When you add new workflows or scripts, update both, the relevant component `AGENTS.md` and this file if they apply broadly.
|
||||
|
||||
## Project Overview
|
||||
|
||||
Prowler is an open-source cloud security assessment tool that supports multiple cloud providers (AWS, Azure, GCP, Kubernetes, GitHub, M365, etc.). The project consists in a monorepo with the following main components:
|
||||
|
||||
- **Prowler SDK**: Python SDK, includes the Prowler CLI, providers, services, checks, compliances, config, etc. (`prowler/`)
|
||||
- **Prowler API**: Django-based REST API backend (`api/`)
|
||||
- **Prowler UI**: Next.js frontend application (`ui/`)
|
||||
- **Prowler MCP Server**: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs (`mcp_server/`)
|
||||
- **Prowler Dashboard**: Prowler CLI feature that allows to visualize the results of the scans in a simple dashboard (`dashboard/`)
|
||||
|
||||
### Project Structure (Key Folders & Files)
|
||||
|
||||
- `prowler/`: Main source code for Prowler SDK (CLI, providers, services, checks, compliances, config, etc.)
|
||||
- `api/`: Django-based REST API backend components
|
||||
- `ui/`: Next.js frontend application
|
||||
- `mcp_server/`: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs
|
||||
- `dashboard/`: Prowler CLI feature that allows to visualize the results of the scans in a simple dashboard
|
||||
- `docs/`: Documentation
|
||||
- `examples/`: Example output formats for providers and scripts
|
||||
- `permissions/`: Permission-related files and policies
|
||||
- `contrib/`: Community-contributed scripts or modules
|
||||
- `tests/`: Prowler SDK test suite
|
||||
- `docker-compose.yml`: Docker compose file to run the Prowler App (API + UI) production environment
|
||||
- `docker-compose-dev.yml`: Docker compose file to run the Prowler App (API + UI) development environment
|
||||
- `pyproject.toml`: Poetry Prowler SDK project file
|
||||
- `.pre-commit-config.yaml`: Pre-commit hooks configuration
|
||||
- `Makefile`: Makefile to run the project
|
||||
- `LICENSE`: License file
|
||||
- `README.md`: README file
|
||||
- `CONTRIBUTING.md`: Contributing guide
|
||||
|
||||
## Python Development
|
||||
|
||||
Most of the code is written in Python, so the main files in the root are focused on Python code.
|
||||
|
||||
### Poetry Dev Environment
|
||||
|
||||
For developing in Python we recommend using `poetry` to manage the dependencies. The minimal version is `2.1.1`. So it is recommended to run all commands using `poetry run ...`.
|
||||
|
||||
To install the core dependencies to develop it is needed to run `poetry install --with dev`.
|
||||
|
||||
### Pre-commit hooks
|
||||
|
||||
The project has pre-commit hooks to lint and format the code. They are installed by running `poetry run pre-commit install`.
|
||||
|
||||
When commiting a change, the hooks will be run automatically. Some of them are:
|
||||
|
||||
- Code formatting (black, isort)
|
||||
- Linting (flake8, pylint)
|
||||
- Security checks (bandit, safety, trufflehog)
|
||||
- YAML/JSON validation
|
||||
- Poetry lock file validation
|
||||
|
||||
|
||||
### Linting and Formatting
|
||||
|
||||
We use the following tools to lint and format the code:
|
||||
|
||||
- `flake8`: for linting the code
|
||||
- `black`: for formatting the code
|
||||
- `pylint`: for linting the code
|
||||
|
||||
You can run all using the `make` command:
|
||||
```bash
|
||||
poetry run make lint
|
||||
poetry run make format
|
||||
```
|
||||
|
||||
Or they will be run automatically when you commit your changes using pre-commit hooks.
|
||||
|
||||
## Commit & Pull Request Guidelines
|
||||
|
||||
For the commit messages and pull requests name follow the conventional-commit style.
|
||||
|
||||
Befire creating a pull request, complete the checklist in `.github/pull_request_template.md`. Summaries should explain deployment impact, highlight review steps, and note changelog or permission updates. Run all relevant tests and linters before requesting review and link screenshots for UI or dashboard changes.
|
||||
|
||||
### Conventional Commit Style
|
||||
|
||||
The Conventional Commits specification is a lightweight convention on top of commit messages. It provides an easy set of rules for creating an explicit commit history; which makes it easier to write automated tools on top of.
|
||||
|
||||
The commit message should be structured as follows:
|
||||
|
||||
```
|
||||
<type>[optional scope]: <description>
|
||||
<BLANK LINE>
|
||||
[optional body]
|
||||
<BLANK LINE>
|
||||
[optional footer(s)]
|
||||
```
|
||||
|
||||
Any line of the commit message cannot be longer 100 characters! This allows the message to be easier to read on GitHub as well as in various git tools
|
||||
|
||||
#### Commit Types
|
||||
|
||||
- **feat**: code change introuce new functionality to the application
|
||||
- **fix**: code change that solve a bug in the codebase
|
||||
- **docs**: documentation only changes
|
||||
- **chore**: changes related to the build process or auxiliary tools and libraries, that do not affect the application's functionality
|
||||
- **perf**: code change that improves performance
|
||||
- **refactor**: code change that neither fixes a bug nor adds a feature
|
||||
- **style**: changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
|
||||
- **test**: adding missing tests or correcting existing tests
|
||||
@@ -45,3 +45,7 @@ pypi-upload: ## Upload package
|
||||
help: ## Show this help.
|
||||
@echo "Prowler Makefile"
|
||||
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
|
||||
##@ Development Environment
|
||||
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
|
||||
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat --build
|
||||
|
||||
@@ -90,6 +90,7 @@ prowler dashboard
|
||||
| M365 | 70 | 7 | 3 | 2 | Official | Stable | UI, API, CLI |
|
||||
| IaC | [See `trivy` docs.](https://trivy.dev/latest/docs/coverage/iac/) | N/A | N/A | N/A | Official | Beta | CLI |
|
||||
| MongoDB Atlas | 10 | 3 | 0 | 0 | Official | Beta | CLI |
|
||||
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | Beta | CLI |
|
||||
| NHN | 6 | 2 | 1 | 0 | Unofficial | Beta | CLI |
|
||||
|
||||
> [!Note]
|
||||
@@ -301,40 +302,12 @@ And many more environments.
|
||||
|
||||

|
||||
|
||||
# Deprecations from v3
|
||||
|
||||
## General
|
||||
- `Allowlist` now is called `Mutelist`.
|
||||
- The `--quiet` option has been deprecated. Use the `--status` flag to filter findings based on their status: PASS, FAIL, or MANUAL.
|
||||
- All findings with an `INFO` status have been reclassified as `MANUAL`.
|
||||
- The CSV output format is standardized across all providers.
|
||||
|
||||
**Deprecated Output Formats**
|
||||
|
||||
The following formats are now deprecated:
|
||||
- Native JSON has been replaced with JSON in [OCSF] v1.1.0 format, which is standardized across all providers (https://schema.ocsf.io/).
|
||||
|
||||
## AWS
|
||||
|
||||
**AWS Flag Deprecation**
|
||||
|
||||
The flag --sts-endpoint-region has been deprecated due to the adoption of AWS STS regional tokens.
|
||||
|
||||
**Sending FAIL Results to AWS Security Hub**
|
||||
|
||||
- To send only FAILS to AWS Security Hub, use one of the following options: `--send-sh-only-fails` or `--security-hub --status FAIL`.
|
||||
|
||||
|
||||
# 📖 Documentation
|
||||
|
||||
**Documentation Resources**
|
||||
|
||||
For installation instructions, usage details, tutorials, and the Developer Guide, visit https://docs.prowler.com/
|
||||
|
||||
# 📃 License
|
||||
|
||||
**Prowler License Information**
|
||||
|
||||
Prowler is licensed under the Apache License 2.0, as indicated in each file within the repository. Obtaining a Copy of the License
|
||||
Prowler is licensed under the Apache License 2.0.
|
||||
|
||||
A copy of the License is available at <http://www.apache.org/licenses/LICENSE-2.0>
|
||||
|
||||
@@ -2,6 +2,39 @@
|
||||
|
||||
All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
## [1.14.0] (Prowler UNRELEASED)
|
||||
|
||||
### Added
|
||||
- Default JWT keys are generated and stored if they are missing from configuration [(#8655)](https://github.com/prowler-cloud/prowler/pull/8655)
|
||||
- `compliance_name` for each compliance [(#7920)](https://github.com/prowler-cloud/prowler/pull/7920)
|
||||
- API Key support [(#8805)](https://github.com/prowler-cloud/prowler/pull/8805)
|
||||
|
||||
### Changed
|
||||
- Now the MANAGE_ACCOUNT permission is required to modify or read user permissions instead of MANAGE_USERS [(#8281)](https://github.com/prowler-cloud/prowler/pull/8281)
|
||||
- Now at least one user with MANAGE_ACCOUNT permission is required in the tenant [(#8729)](https://github.com/prowler-cloud/prowler/pull/8729)
|
||||
|
||||
### Security
|
||||
- Django updated to the latest 5.1 security release, 5.1.13, due to problems with potential [SQL injection](https://github.com/prowler-cloud/prowler/security/dependabot/104) and [directory traversals](https://github.com/prowler-cloud/prowler/security/dependabot/103) [(#8842)](https://github.com/prowler-cloud/prowler/pull/8842)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.2] (Prowler 5.12.3)
|
||||
|
||||
### Fixed
|
||||
- 500 error when deleting user [(#8731)](https://github.com/prowler-cloud/prowler/pull/8731)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.1] (Prowler 5.12.2)
|
||||
|
||||
### Changed
|
||||
- Renamed compliance overview task queue to `compliance` [(#8755)](https://github.com/prowler-cloud/prowler/pull/8755)
|
||||
|
||||
### Security
|
||||
- Django updated to the latest 5.1 security release, 5.1.12, due to [problems](https://www.djangoproject.com/weblog/2025/sep/03/security-releases/) with potential SQL injection in FilteredRelation column aliases [(#8693)](https://github.com/prowler-cloud/prowler/pull/8693)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.0] (Prowler 5.12.0)
|
||||
|
||||
### Added
|
||||
@@ -21,6 +54,8 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
### Fixed
|
||||
- GitHub provider always scans user instead of organization when using provider UID [(#8587)](https://github.com/prowler-cloud/prowler/pull/8587)
|
||||
|
||||
---
|
||||
|
||||
## [1.11.0] (Prowler 5.10.0)
|
||||
|
||||
### Added
|
||||
|
||||
@@ -18,7 +18,11 @@ Valkey exposes a Redis 7.2 compliant API. Any service that exposes the Redis API
|
||||
|
||||
# Modify environment variables
|
||||
|
||||
Under the root path of the project, you can find a file called `.env.example`. This file shows all the environment variables that the project uses. You *must* create a new file called `.env` and set the values for the variables.
|
||||
Under the root path of the project, you can find a file called `.env`. This file shows all the environment variables that the project uses. You should review it and set the values for the variables you want to change.
|
||||
|
||||
If you don’t set `DJANGO_TOKEN_SIGNING_KEY` or `DJANGO_TOKEN_VERIFYING_KEY`, the API will generate them at `~/.config/prowler-api/` with `0600` and `0644` permissions; back up these files to persist identity across redeploys.
|
||||
|
||||
**Important note**: Every Prowler version (or repository branches and tags) could have different variables set in its `.env` file. Please use the `.env` file that corresponds with each version.
|
||||
|
||||
## Local deployment
|
||||
Keep in mind if you export the `.env` file to use it with local deployment that you will have to do it within the context of the Poetry interpreter, not before. Otherwise, variables will not be loaded properly.
|
||||
|
||||
@@ -32,7 +32,7 @@ start_prod_server() {
|
||||
|
||||
start_worker() {
|
||||
echo "Starting the worker..."
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations -E --max-tasks-per-child 1
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations,compliance -E --max-tasks-per-child 1
|
||||
}
|
||||
|
||||
start_worker_beat() {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -273,14 +273,14 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a
|
||||
|
||||
[[package]]
|
||||
name = "authlib"
|
||||
version = "1.6.1"
|
||||
version = "1.6.4"
|
||||
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "authlib-1.6.1-py2.py3-none-any.whl", hash = "sha256:e9d2031c34c6309373ab845afc24168fe9e93dc52d252631f52642f21f5ed06e"},
|
||||
{file = "authlib-1.6.1.tar.gz", hash = "sha256:4dffdbb1460ba6ec8c17981a4c67af7d8af131231b5a36a88a1e8c80c111cdfd"},
|
||||
{file = "authlib-1.6.4-py2.py3-none-any.whl", hash = "sha256:39313d2a2caac3ecf6d8f95fbebdfd30ae6ea6ae6a6db794d976405fdd9aa796"},
|
||||
{file = "authlib-1.6.4.tar.gz", hash = "sha256:104b0442a43061dc8bc23b133d1d06a2b0a9c2e3e33f34c4338929e816287649"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -383,6 +383,24 @@ cryptography = ">=2.1.4"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-apimanagement"
|
||||
version = "5.0.0"
|
||||
description = "Microsoft Azure API Management Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "azure_mgmt_apimanagement-5.0.0-py3-none-any.whl", hash = "sha256:b88c42a392333b60722fb86f15d092dfc19a8d67510dccd15c217381dff4e6ec"},
|
||||
{file = "azure_mgmt_apimanagement-5.0.0.tar.gz", hash = "sha256:0ab7fe17e70fe3154cd840ff47d19d7a4610217003eaa7c21acf3511a6e57999"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-common = ">=1.1"
|
||||
azure-mgmt-core = ">=1.3.2"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-applicationinsights"
|
||||
version = "4.1.0"
|
||||
@@ -540,6 +558,23 @@ azure-mgmt-core = ">=1.3.2"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-loganalytics"
|
||||
version = "12.0.0"
|
||||
description = "Microsoft Azure Log Analytics Management Client Library for Python"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "azure-mgmt-loganalytics-12.0.0.zip", hash = "sha256:da128a7e0291be7fa2063848df92a9180cf5c16d42adc09d2bc2efd711536bfb"},
|
||||
{file = "azure_mgmt_loganalytics-12.0.0-py2.py3-none-any.whl", hash = "sha256:75ac1d47dd81179905c40765be8834643d8994acff31056ddc1863017f3faa02"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-common = ">=1.1,<2.0"
|
||||
azure-mgmt-core = ">=1.2.0,<2.0.0"
|
||||
msrest = ">=0.6.21"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-monitor"
|
||||
version = "6.0.2"
|
||||
@@ -750,6 +785,23 @@ azure-mgmt-core = ">=1.3.2"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-monitor-query"
|
||||
version = "2.0.0"
|
||||
description = "Microsoft Corporation Azure Monitor Query Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "azure_monitor_query-2.0.0-py3-none-any.whl", hash = "sha256:8f52d581271d785e12f49cd5aaa144b8910fb843db2373855a7ef94c7fc462ea"},
|
||||
{file = "azure_monitor_query-2.0.0.tar.gz", hash = "sha256:7b05f2fcac4fb67fc9f77a7d4c5d98a0f3099fb73b57c69ec1b080773994671b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-core = ">=1.30.0"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-storage-blob"
|
||||
version = "12.24.1"
|
||||
@@ -1511,14 +1563,14 @@ with-social = ["django-allauth[socialaccount] (>=64.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "django"
|
||||
version = "5.1.10"
|
||||
version = "5.1.13"
|
||||
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "django-5.1.10-py3-none-any.whl", hash = "sha256:19c9b771e9cf4de91101861aadd2daaa159bcf10698ca909c5755c88e70ccb84"},
|
||||
{file = "django-5.1.10.tar.gz", hash = "sha256:73e5d191421d177803dbd5495d94bc7d06d156df9561f4eea9e11b4994c07137"},
|
||||
{file = "django-5.1.13-py3-none-any.whl", hash = "sha256:06f257f79dc4c17f3f9e23b106a4c5ed1335abecbe731e83c598c941d14fbeed"},
|
||||
{file = "django-5.1.13.tar.gz", hash = "sha256:543ff21679f15e80edfc01fe7ea35f8291b6d4ea589433882913626a7c1cf929"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1872,6 +1924,27 @@ files = [
|
||||
Django = ">=4.2"
|
||||
djangorestframework = ">=3.15.0"
|
||||
|
||||
[[package]]
|
||||
name = "drf-simple-apikey"
|
||||
version = "2.2.1"
|
||||
description = "API Key authentication and permissions for Django REST."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "drf_simple_apikey-2.2.1-py2.py3-none-any.whl", hash = "sha256:2a60b35676d14f907c47dee179dd0fa7425a84c34d6ff5b48d08d3b87ff32809"},
|
||||
{file = "drf_simple_apikey-2.2.1.tar.gz", hash = "sha256:e5a52804bbac12c8db80c10a3d51a8514fc59fc8385b5e751099a2bc944ad25d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=38.0.4"
|
||||
django = ">=4.2"
|
||||
djangorestframework = ">=3.14.0"
|
||||
|
||||
[package.extras]
|
||||
test = ["coverage", "pytest", "pytest-django"]
|
||||
tooling = ["black (==22.3.0)", "bump2version", "pylint"]
|
||||
|
||||
[[package]]
|
||||
name = "drf-spectacular"
|
||||
version = "0.27.2"
|
||||
@@ -2933,6 +3006,22 @@ html5 = ["html5lib"]
|
||||
htmlsoup = ["BeautifulSoup4"]
|
||||
source = ["Cython (>=3.0.11,<3.1.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "markdown"
|
||||
version = "3.9"
|
||||
description = "Python implementation of John Gruber's Markdown."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280"},
|
||||
{file = "markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
|
||||
testing = ["coverage", "pyyaml"]
|
||||
|
||||
[[package]]
|
||||
name = "markdown-it-py"
|
||||
version = "4.0.0"
|
||||
@@ -3987,7 +4076,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "prowler"
|
||||
version = "5.11.0"
|
||||
version = "5.13.0"
|
||||
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
|
||||
optional = false
|
||||
python-versions = ">3.9.1,<3.13"
|
||||
@@ -4000,6 +4089,7 @@ alive-progress = "3.3.0"
|
||||
awsipranges = "0.3.3"
|
||||
azure-identity = "1.21.0"
|
||||
azure-keyvault-keys = "4.10.0"
|
||||
azure-mgmt-apimanagement = "5.0.0"
|
||||
azure-mgmt-applicationinsights = "4.1.0"
|
||||
azure-mgmt-authorization = "4.0.0"
|
||||
azure-mgmt-compute = "34.0.0"
|
||||
@@ -4008,6 +4098,7 @@ azure-mgmt-containerservice = "34.1.0"
|
||||
azure-mgmt-cosmosdb = "9.7.0"
|
||||
azure-mgmt-databricks = "2.0.0"
|
||||
azure-mgmt-keyvault = "10.3.1"
|
||||
azure-mgmt-loganalytics = "12.0.0"
|
||||
azure-mgmt-monitor = "6.0.2"
|
||||
azure-mgmt-network = "28.1.0"
|
||||
azure-mgmt-rdbms = "10.1.0"
|
||||
@@ -4020,6 +4111,7 @@ azure-mgmt-sql = "3.0.1"
|
||||
azure-mgmt-storage = "22.1.1"
|
||||
azure-mgmt-subscription = "3.1.1"
|
||||
azure-mgmt-web = "8.0.0"
|
||||
azure-monitor-query = "2.0.0"
|
||||
azure-storage-blob = "12.24.1"
|
||||
boto3 = "1.39.15"
|
||||
botocore = "1.39.15"
|
||||
@@ -4031,8 +4123,10 @@ detect-secrets = "1.5.0"
|
||||
dulwich = "0.23.0"
|
||||
google-api-python-client = "2.163.0"
|
||||
google-auth-httplib2 = ">=0.1,<0.3"
|
||||
h2 = "4.3.0"
|
||||
jsonschema = "4.23.0"
|
||||
kubernetes = "32.0.1"
|
||||
markdown = "3.9.0"
|
||||
microsoft-kiota-abstractions = "1.9.2"
|
||||
msgraph-sdk = "1.23.0"
|
||||
numpy = "2.0.2"
|
||||
@@ -4053,7 +4147,7 @@ tzlocal = "5.3.1"
|
||||
type = "git"
|
||||
url = "https://github.com/prowler-cloud/prowler.git"
|
||||
reference = "master"
|
||||
resolved_reference = "525f152e51f82de2110ed158c8dc489e42c289cf"
|
||||
resolved_reference = "a52697bfdfee83d14a49c11dcbe96888b5cd767e"
|
||||
|
||||
[[package]]
|
||||
name = "psutil"
|
||||
@@ -5223,6 +5317,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
|
||||
@@ -5231,6 +5326,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
|
||||
@@ -5239,6 +5335,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
|
||||
@@ -5247,6 +5344,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
|
||||
@@ -5255,6 +5353,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
|
||||
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
|
||||
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
|
||||
@@ -6160,4 +6259,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.11,<3.13"
|
||||
content-hash = "b954196aba7e108cacb94fd15732be7130b27379add09140fabbb55f7335bb7b"
|
||||
content-hash = "03442fd4673006c5a74374f90f53621fd1c9d117279fe6cc0355ef833eb7f9bb"
|
||||
|
||||
@@ -7,7 +7,7 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
|
||||
dependencies = [
|
||||
"celery[pytest] (>=5.4.0,<6.0.0)",
|
||||
"dj-rest-auth[with_social,jwt] (==7.0.1)",
|
||||
"django==5.1.10",
|
||||
"django (==5.1.13)",
|
||||
"django-allauth[saml] (>=65.8.0,<66.0.0)",
|
||||
"django-celery-beat (>=2.7.0,<3.0.0)",
|
||||
"django-celery-results (>=2.5.1,<3.0.0)",
|
||||
@@ -31,7 +31,9 @@ dependencies = [
|
||||
"uuid6==2024.7.10",
|
||||
"openai (>=1.82.0,<2.0.0)",
|
||||
"xmlsec==1.3.14",
|
||||
"h2 (==4.3.0)"
|
||||
"h2 (==4.3.0)",
|
||||
"markdown (>=3.9,<4.0)",
|
||||
"drf-simple-apikey (==2.2.1)"
|
||||
]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "Apache-2.0"
|
||||
@@ -39,7 +41,7 @@ name = "prowler-api"
|
||||
package-mode = false
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.13.0"
|
||||
version = "1.14.0"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
@@ -1,4 +1,26 @@
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from config.custom_logging import BackendLogger
|
||||
from config.env import env
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
|
||||
logger = logging.getLogger(BackendLogger.API)
|
||||
|
||||
SIGNING_KEY_ENV = "DJANGO_TOKEN_SIGNING_KEY"
|
||||
VERIFYING_KEY_ENV = "DJANGO_TOKEN_VERIFYING_KEY"
|
||||
|
||||
PRIVATE_KEY_FILE = "jwt_private.pem"
|
||||
PUBLIC_KEY_FILE = "jwt_public.pem"
|
||||
|
||||
KEYS_DIRECTORY = (
|
||||
Path.home() / ".config" / "prowler-api"
|
||||
) # `/home/prowler/.config/prowler-api` inside the container
|
||||
|
||||
_keys_initialized = False # Flag to prevent multiple executions within the same process
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
@@ -6,7 +28,142 @@ class ApiConfig(AppConfig):
|
||||
name = "api"
|
||||
|
||||
def ready(self):
|
||||
from api import schema_extensions # noqa: F401
|
||||
from api import signals # noqa: F401
|
||||
from api.compliance import load_prowler_compliance
|
||||
|
||||
# Generate required cryptographic keys if not present, but only if:
|
||||
# `"manage.py" not in sys.argv`: If an external server (e.g., Gunicorn) is running the app
|
||||
# `os.environ.get("RUN_MAIN")`: If it's not a Django command or using `runserver`,
|
||||
# only the main process will do it
|
||||
if "manage.py" not in sys.argv or os.environ.get("RUN_MAIN"):
|
||||
self._ensure_crypto_keys()
|
||||
|
||||
load_prowler_compliance()
|
||||
|
||||
def _ensure_crypto_keys(self):
|
||||
"""
|
||||
Orchestrator method that ensures all required cryptographic keys are present.
|
||||
This method coordinates the generation of:
|
||||
- RSA key pairs for JWT token signing and verification
|
||||
Note: During development, Django spawns multiple processes (migrations, fixtures, etc.)
|
||||
which will each generate their own keys. This is expected behavior and each process
|
||||
will have consistent keys for its lifetime. In production, set the keys as environment
|
||||
variables to avoid regeneration.
|
||||
"""
|
||||
global _keys_initialized
|
||||
|
||||
# Skip key generation if running tests
|
||||
if hasattr(settings, "TESTING") and settings.TESTING:
|
||||
return
|
||||
|
||||
# Skip if already initialized in this process
|
||||
if _keys_initialized:
|
||||
return
|
||||
|
||||
# Check if both JWT keys are set; if not, generate them
|
||||
signing_key = env.str(SIGNING_KEY_ENV, default="").strip()
|
||||
verifying_key = env.str(VERIFYING_KEY_ENV, default="").strip()
|
||||
|
||||
if not signing_key or not verifying_key:
|
||||
logger.info(
|
||||
f"Generating JWT RSA key pair. In production, set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' "
|
||||
"environment variables."
|
||||
)
|
||||
self._ensure_jwt_keys()
|
||||
|
||||
# Mark as initialized to prevent future executions in this process
|
||||
_keys_initialized = True
|
||||
|
||||
def _read_key_file(self, file_name):
|
||||
"""
|
||||
Utility method to read the contents of a file.
|
||||
"""
|
||||
file_path = KEYS_DIRECTORY / file_name
|
||||
return file_path.read_text().strip() if file_path.is_file() else None
|
||||
|
||||
def _write_key_file(self, file_name, content, private=True):
|
||||
"""
|
||||
Utility method to write content to a file.
|
||||
"""
|
||||
try:
|
||||
file_path = KEYS_DIRECTORY / file_name
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
file_path.write_text(content)
|
||||
file_path.chmod(0o600 if private else 0o644)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error writing key file '{file_name}': {e}. "
|
||||
f"Please set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' manually."
|
||||
)
|
||||
raise e
|
||||
|
||||
def _ensure_jwt_keys(self):
|
||||
"""
|
||||
Generate RSA key pairs for JWT token signing and verification
|
||||
if they are not already set in environment variables.
|
||||
"""
|
||||
# Read existing keys from files if they exist
|
||||
signing_key = self._read_key_file(PRIVATE_KEY_FILE)
|
||||
verifying_key = self._read_key_file(PUBLIC_KEY_FILE)
|
||||
|
||||
if not signing_key or not verifying_key:
|
||||
# Generate and store the RSA key pair
|
||||
signing_key, verifying_key = self._generate_jwt_keys()
|
||||
self._write_key_file(PRIVATE_KEY_FILE, signing_key, private=True)
|
||||
self._write_key_file(PUBLIC_KEY_FILE, verifying_key, private=False)
|
||||
logger.info("JWT keys generated and stored successfully")
|
||||
|
||||
else:
|
||||
logger.info("JWT keys already generated")
|
||||
|
||||
# Set environment variables and Django settings
|
||||
os.environ[SIGNING_KEY_ENV] = signing_key
|
||||
settings.SIMPLE_JWT["SIGNING_KEY"] = signing_key
|
||||
|
||||
os.environ[VERIFYING_KEY_ENV] = verifying_key
|
||||
settings.SIMPLE_JWT["VERIFYING_KEY"] = verifying_key
|
||||
|
||||
def _generate_jwt_keys(self):
|
||||
"""
|
||||
Generate and set RSA key pairs for JWT token operations.
|
||||
"""
|
||||
try:
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
|
||||
# Generate RSA key pair
|
||||
private_key = rsa.generate_private_key( # Future improvement: we could read the next values from env vars
|
||||
public_exponent=65537,
|
||||
key_size=2048,
|
||||
)
|
||||
|
||||
# Serialize private key (for signing)
|
||||
private_pem = private_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PrivateFormat.PKCS8,
|
||||
encryption_algorithm=serialization.NoEncryption(),
|
||||
).decode("utf-8")
|
||||
|
||||
# Serialize public key (for verification)
|
||||
public_key = private_key.public_key()
|
||||
public_pem = public_key.public_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PublicFormat.SubjectPublicKeyInfo,
|
||||
).decode("utf-8")
|
||||
|
||||
logger.debug("JWT RSA key pair generated successfully.")
|
||||
return private_pem, public_pem
|
||||
|
||||
except ImportError as e:
|
||||
logger.warning(
|
||||
"The 'cryptography' package is required for automatic JWT key generation."
|
||||
)
|
||||
raise e
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error generating JWT keys: {e}. Please set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' manually."
|
||||
)
|
||||
raise e
|
||||
|
||||
@@ -0,0 +1,76 @@
|
||||
from typing import Optional, Tuple
|
||||
from uuid import UUID
|
||||
|
||||
from cryptography.fernet import InvalidToken
|
||||
from django.utils import timezone
|
||||
from drf_simple_apikey.backends import APIKeyAuthentication as BaseAPIKeyAuth
|
||||
from drf_simple_apikey.crypto import get_crypto
|
||||
from rest_framework.authentication import BaseAuthentication
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from rest_framework.request import Request
|
||||
from rest_framework_simplejwt.authentication import JWTAuthentication
|
||||
|
||||
from api.models import TenantAPIKey, TenantAPIKeyManager
|
||||
|
||||
|
||||
class TenantAPIKeyAuthentication(BaseAPIKeyAuth):
|
||||
model = TenantAPIKey
|
||||
|
||||
def __init__(self):
|
||||
self.key_crypto = get_crypto()
|
||||
|
||||
def authenticate(self, request: Request):
|
||||
prefixed_key = self.get_key(request)
|
||||
|
||||
# Split prefix from key (format: pk_xxxxxxxx.encrypted_key)
|
||||
try:
|
||||
prefix, key = prefixed_key.split(TenantAPIKeyManager.separator, 1)
|
||||
except ValueError:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
try:
|
||||
entity, _ = self._authenticate_credentials(request, key)
|
||||
except InvalidToken:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
# Get the API key instance to update last_used_at and retrieve tenant info
|
||||
# We need to decrypt again to get the pk (already validated by _authenticate_credentials)
|
||||
payload = self.key_crypto.decrypt(key)
|
||||
api_key_pk = payload["_pk"]
|
||||
|
||||
# Convert string UUID back to UUID object for lookup
|
||||
if isinstance(api_key_pk, str):
|
||||
api_key_pk = UUID(api_key_pk)
|
||||
|
||||
try:
|
||||
api_key_instance = TenantAPIKey.objects.get(id=api_key_pk, prefix=prefix)
|
||||
except TenantAPIKey.DoesNotExist:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
# Update last_used_at
|
||||
api_key_instance.last_used_at = timezone.now()
|
||||
api_key_instance.save(update_fields=["last_used_at"])
|
||||
|
||||
return entity, {
|
||||
"tenant_id": str(api_key_instance.tenant_id),
|
||||
"sub": str(api_key_instance.entity.id),
|
||||
"api_key_prefix": prefix,
|
||||
}
|
||||
|
||||
|
||||
class CombinedJWTOrAPIKeyAuthentication(BaseAuthentication):
|
||||
jwt_auth = JWTAuthentication()
|
||||
api_key_auth = TenantAPIKeyAuthentication()
|
||||
|
||||
def authenticate(self, request: Request) -> Optional[Tuple[object, dict]]:
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
|
||||
# Prioritize JWT authentication if both are present
|
||||
if auth_header.startswith("Bearer "):
|
||||
return self.jwt_auth.authenticate(request)
|
||||
|
||||
if auth_header.startswith("Api-Key "):
|
||||
return self.api_key_auth.authenticate(request)
|
||||
|
||||
# Default fallback
|
||||
return self.jwt_auth.authenticate(request)
|
||||
@@ -5,8 +5,8 @@ from rest_framework.exceptions import NotAuthenticated
|
||||
from rest_framework.filters import SearchFilter
|
||||
from rest_framework_json_api import filters
|
||||
from rest_framework_json_api.views import ModelViewSet
|
||||
from rest_framework_simplejwt.authentication import JWTAuthentication
|
||||
|
||||
from api.authentication import CombinedJWTOrAPIKeyAuthentication
|
||||
from api.db_router import MainRouter
|
||||
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
|
||||
from api.filters import CustomDjangoFilterBackend
|
||||
@@ -15,7 +15,7 @@ from api.rbac.permissions import HasPermissions
|
||||
|
||||
|
||||
class BaseViewSet(ModelViewSet):
|
||||
authentication_classes = [JWTAuthentication]
|
||||
authentication_classes = [CombinedJWTOrAPIKeyAuthentication]
|
||||
required_permissions = []
|
||||
permission_classes = [permissions.IsAuthenticated, HasPermissions]
|
||||
filter_backends = [
|
||||
|
||||
@@ -225,6 +225,7 @@ def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
# Build compliance dictionary
|
||||
compliance_dict = {
|
||||
"framework": compliance_data.Framework,
|
||||
"name": compliance_data.Name,
|
||||
"version": compliance_data.Version,
|
||||
"provider": provider_type,
|
||||
"description": compliance_data.Description,
|
||||
|
||||
@@ -61,7 +61,7 @@ def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
|
||||
with transaction.atomic():
|
||||
with connection.cursor() as cursor:
|
||||
try:
|
||||
# just in case the value is an UUID object
|
||||
# just in case the value is a UUID object
|
||||
uuid.UUID(str(value))
|
||||
except ValueError:
|
||||
raise ValidationError("Must be a valid UUID")
|
||||
@@ -434,6 +434,12 @@ def drop_index_on_partitions(
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def generate_api_key_prefix():
|
||||
"""Generate a random 8-character prefix for API keys (e.g., 'pk_abc123de')."""
|
||||
random_chars = generate_random_token(length=8)
|
||||
return f"pk_{random_chars}"
|
||||
|
||||
|
||||
# Postgres enum definition for member role
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
from django.core.exceptions import ValidationError as django_validation_error
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import APIException
|
||||
from rest_framework.exceptions import (
|
||||
APIException,
|
||||
AuthenticationFailed,
|
||||
NotAuthenticated,
|
||||
)
|
||||
from rest_framework_json_api.exceptions import exception_handler
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
|
||||
@@ -68,15 +72,18 @@ def custom_exception_handler(exc, context):
|
||||
exc = ValidationError(exc.message_dict)
|
||||
else:
|
||||
exc = ValidationError(detail=exc.messages[0], code=exc.code)
|
||||
elif isinstance(exc, (TokenError, InvalidToken)):
|
||||
if (
|
||||
hasattr(exc, "detail")
|
||||
and isinstance(exc.detail, dict)
|
||||
and "messages" in exc.detail
|
||||
):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
# Force 401 status for AuthenticationFailed exceptions regardless of the authentication backend
|
||||
elif isinstance(exc, (AuthenticationFailed, NotAuthenticated, TokenError)):
|
||||
exc.status_code = status.HTTP_401_UNAUTHORIZED
|
||||
if isinstance(exc, (TokenError, InvalidToken)):
|
||||
if (
|
||||
hasattr(exc, "detail")
|
||||
and isinstance(exc.detail, dict)
|
||||
and "messages" in exc.detail
|
||||
):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
return exception_handler(exc, context)
|
||||
|
||||
|
||||
|
||||
@@ -43,6 +43,7 @@ from api.models import (
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
)
|
||||
from api.rls import Tenant
|
||||
@@ -219,10 +220,31 @@ class MembershipFilter(FilterSet):
|
||||
|
||||
|
||||
class ProviderFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
connected = BooleanFilter()
|
||||
inserted_at = DateFilter(
|
||||
field_name="inserted_at",
|
||||
lookup_expr="date",
|
||||
help_text="""Filter by date when the provider was added
|
||||
(format: YYYY-MM-DD)""",
|
||||
)
|
||||
updated_at = DateFilter(
|
||||
field_name="updated_at",
|
||||
lookup_expr="date",
|
||||
help_text="""Filter by date when the provider was updated
|
||||
(format: YYYY-MM-DD)""",
|
||||
)
|
||||
connected = BooleanFilter(
|
||||
help_text="""Filter by connection status. Set to True to return only
|
||||
connected providers, or False to return only providers with failed
|
||||
connections. If not specified, both connected and failed providers are
|
||||
included. Providers with no connection attempt (status is null) are
|
||||
excluded from this filter."""
|
||||
)
|
||||
provider = ChoiceFilter(choices=Provider.ProviderChoices.choices)
|
||||
provider__in = ChoiceInFilter(
|
||||
field_name="provider",
|
||||
choices=Provider.ProviderChoices.choices,
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Provider
|
||||
@@ -648,8 +670,16 @@ class LatestFindingFilter(CommonFindingFilters):
|
||||
|
||||
|
||||
class ProviderSecretFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
inserted_at = DateFilter(
|
||||
field_name="inserted_at",
|
||||
lookup_expr="date",
|
||||
help_text="Filter by date when the secret was added (format: YYYY-MM-DD)",
|
||||
)
|
||||
updated_at = DateFilter(
|
||||
field_name="updated_at",
|
||||
lookup_expr="date",
|
||||
help_text="Filter by date when the secret was updated (format: YYYY-MM-DD)",
|
||||
)
|
||||
provider = UUIDFilter(field_name="provider__id", lookup_expr="exact")
|
||||
|
||||
class Meta:
|
||||
@@ -880,3 +910,20 @@ class IntegrationJiraFindingsFilter(FilterSet):
|
||||
}
|
||||
)
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
|
||||
class TenantApiKeyFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="created", lookup_expr="date")
|
||||
inserted_at__gte = DateFilter(field_name="created", lookup_expr="gte")
|
||||
inserted_at__lte = DateFilter(field_name="created", lookup_expr="lte")
|
||||
expires_at = DateFilter(field_name="expiry_date", lookup_expr="date")
|
||||
expires_at__gte = DateFilter(field_name="expiry_date", lookup_expr="gte")
|
||||
expires_at__lte = DateFilter(field_name="expiry_date", lookup_expr="lte")
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = {
|
||||
"prefix": ["exact", "icontains"],
|
||||
"revoked": ["exact"],
|
||||
"name": ["exact", "icontains"],
|
||||
}
|
||||
|
||||
@@ -8,9 +8,14 @@ def extract_auth_info(request) -> dict:
|
||||
if getattr(request, "auth", None) is not None:
|
||||
tenant_id = request.auth.get("tenant_id", "N/A")
|
||||
user_id = request.auth.get("sub", "N/A")
|
||||
api_key_prefix = request.auth.get("api_key_prefix", "N/A")
|
||||
else:
|
||||
tenant_id, user_id = "N/A", "N/A"
|
||||
return {"tenant_id": tenant_id, "user_id": user_id}
|
||||
tenant_id, user_id, api_key_prefix = "N/A", "N/A", "N/A"
|
||||
return {
|
||||
"tenant_id": tenant_id,
|
||||
"user_id": user_id,
|
||||
"api_key_prefix": api_key_prefix,
|
||||
}
|
||||
|
||||
|
||||
class APILoggingMiddleware:
|
||||
@@ -38,6 +43,7 @@ class APILoggingMiddleware:
|
||||
extra={
|
||||
"user_id": auth_info["user_id"],
|
||||
"tenant_id": auth_info["tenant_id"],
|
||||
"api_key_prefix": auth_info["api_key_prefix"],
|
||||
"method": request.method,
|
||||
"path": request.path,
|
||||
"query_params": request.GET.dict(),
|
||||
|
||||
@@ -0,0 +1,125 @@
|
||||
# Generated by Django 5.1.12 on 2025-09-30 13:10
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
import drf_simple_apikey.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0047_remove_integration_unique_configuration_per_tenant"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="TenantAPIKey",
|
||||
fields=[
|
||||
("name", models.CharField(blank=True, max_length=255, null=True)),
|
||||
(
|
||||
"expiry_date",
|
||||
models.DateTimeField(
|
||||
default=drf_simple_apikey.models._expiry_date,
|
||||
help_text="Once API key expires, entities cannot use it anymore.",
|
||||
verbose_name="Expires",
|
||||
),
|
||||
),
|
||||
(
|
||||
"revoked",
|
||||
models.BooleanField(
|
||||
blank=True,
|
||||
default=False,
|
||||
help_text="If the API key is revoked, entities cannot use it anymore. (This cannot be undone.)",
|
||||
),
|
||||
),
|
||||
("created", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"whitelisted_ips",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="List of allowed IP addresses for this API key.",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"blacklisted_ips",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="List of denied IP addresses for this API key.",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"prefix",
|
||||
models.CharField(
|
||||
default=api.db_utils.generate_api_key_prefix,
|
||||
editable=False,
|
||||
help_text="Unique prefix to identify the API key",
|
||||
max_length=11,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"last_used_at",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
help_text="Last time this API key was used for authentication",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="user_api_keys",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "api_keys",
|
||||
"abstract": False,
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "prefix"],
|
||||
name="api_keys_tenant_prefix_idx",
|
||||
)
|
||||
],
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "prefix"), name="unique_api_key_prefixes"
|
||||
)
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="tenantapikey",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_tenantapikey",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -22,6 +22,8 @@ from django.db.models import Q
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from django_celery_results.models import TaskResult
|
||||
from drf_simple_apikey.crypto import get_crypto
|
||||
from drf_simple_apikey.models import AbstractAPIKey, AbstractAPIKeyManager
|
||||
from psqlextra.manager import PostgresManager
|
||||
from psqlextra.models import PostgresPartitionedModel
|
||||
from psqlextra.types import PostgresPartitioningMethod
|
||||
@@ -42,6 +44,7 @@ from api.db_utils import (
|
||||
StateEnumField,
|
||||
StatusEnumField,
|
||||
enum_to_choices,
|
||||
generate_api_key_prefix,
|
||||
generate_random_token,
|
||||
one_week_from_now,
|
||||
)
|
||||
@@ -125,6 +128,17 @@ class ActiveProviderPartitionedManager(PostgresManager, ActiveProviderManager):
|
||||
return super().get_queryset().filter(self.active_provider_filter())
|
||||
|
||||
|
||||
class TenantAPIKeyManager(AbstractAPIKeyManager):
|
||||
separator = "."
|
||||
|
||||
def assign_api_key(self, obj) -> str:
|
||||
payload = {"_pk": str(obj.pk), "_exp": obj.expiry_date.timestamp()}
|
||||
key = get_crypto().generate(payload)
|
||||
|
||||
prefixed_key = f"{obj.prefix}{self.separator}{key}"
|
||||
return prefixed_key
|
||||
|
||||
|
||||
class User(AbstractBaseUser):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
name = models.CharField(max_length=150, validators=[MinLengthValidator(3)])
|
||||
@@ -204,6 +218,55 @@ class Membership(models.Model):
|
||||
resource_name = "memberships"
|
||||
|
||||
|
||||
class TenantAPIKey(AbstractAPIKey, RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
prefix = models.CharField(
|
||||
max_length=11,
|
||||
unique=True,
|
||||
default=generate_api_key_prefix,
|
||||
editable=False,
|
||||
help_text="Unique prefix to identify the API key",
|
||||
)
|
||||
last_used_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Last time this API key was used for authentication",
|
||||
)
|
||||
entity = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="user_api_keys",
|
||||
)
|
||||
|
||||
objects = TenantAPIKeyManager()
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "api_keys"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "prefix"),
|
||||
name="unique_api_key_prefixes",
|
||||
),
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["tenant_id", "prefix"], name="api_keys_tenant_prefix_idx"
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "api-keys"
|
||||
|
||||
|
||||
class Provider(RowLevelSecurityProtectedModel):
|
||||
objects = ActiveProviderManager()
|
||||
all_objects = models.Manager()
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
|
||||
from drf_spectacular.openapi import AutoSchema
|
||||
|
||||
|
||||
class CombinedJWTOrAPIKeyAuthenticationScheme(OpenApiAuthenticationExtension):
|
||||
target_class = "api.authentication.CombinedJWTOrAPIKeyAuthentication"
|
||||
name = "JWT or API Key"
|
||||
|
||||
def get_security_definition(self, auto_schema: AutoSchema): # noqa: F841
|
||||
return {
|
||||
"type": "http",
|
||||
"scheme": "bearer",
|
||||
"bearerFormat": "JWT",
|
||||
"description": "Supports both JWT Bearer tokens and API Key authentication. "
|
||||
"Use `Bearer <token>` for JWT or `Api-Key <key>` for API keys.",
|
||||
}
|
||||
@@ -1,12 +1,12 @@
|
||||
from celery import states
|
||||
from celery.signals import before_task_publish
|
||||
from config.celery import celery_app
|
||||
from django.db.models.signals import post_delete
|
||||
from django.db.models.signals import post_delete, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from django_celery_results.backends.database import DatabaseBackend
|
||||
|
||||
from api.db_utils import delete_related_daily_task
|
||||
from api.models import Provider
|
||||
from api.models import Membership, Provider, TenantAPIKey, User
|
||||
|
||||
|
||||
def create_task_result_on_publish(sender=None, headers=None, **kwargs): # noqa: F841
|
||||
@@ -32,3 +32,27 @@ before_task_publish.connect(
|
||||
def delete_provider_scan_task(sender, instance, **kwargs): # noqa: F841
|
||||
# Delete the associated periodic task when the provider is deleted
|
||||
delete_related_daily_task(instance.id)
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=User)
|
||||
def revoke_user_api_keys(sender, instance, **kwargs): # noqa: F841
|
||||
"""
|
||||
Revoke all API keys associated with a user before deletion.
|
||||
|
||||
The entity field will be set to NULL by on_delete=SET_NULL,
|
||||
but we explicitly revoke the keys to prevent further use.
|
||||
"""
|
||||
TenantAPIKey.objects.filter(entity=instance).update(revoked=True)
|
||||
|
||||
|
||||
@receiver(post_delete, sender=Membership)
|
||||
def revoke_membership_api_keys(sender, instance, **kwargs): # noqa: F841
|
||||
"""
|
||||
Revoke all API keys when a user is removed from a tenant.
|
||||
|
||||
When a membership is deleted, all API keys created by that user
|
||||
in that tenant should be revoked to prevent further access.
|
||||
"""
|
||||
TenantAPIKey.objects.filter(
|
||||
entity=instance.user, tenant_id=instance.tenant.id
|
||||
).update(revoked=True)
|
||||
|
||||
@@ -1,9 +1,14 @@
|
||||
import time
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
|
||||
from django.urls import reverse
|
||||
from drf_simple_apikey.crypto import get_crypto
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from api.models import Membership, User
|
||||
from api.models import Membership, Role, TenantAPIKey, User, UserRoleRelationship
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -298,3 +303,706 @@ class TestTokenSwitchTenant:
|
||||
assert invalid_tenant_response.json()["errors"][0]["detail"] == (
|
||||
"Tenant does not exist or user is not a " "member."
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestAPIKeyAuthentication:
|
||||
def test_successful_authentication_with_api_key(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""Verify API key can authenticate and access protected endpoints."""
|
||||
client = APIClient()
|
||||
api_key = api_keys_fixture[0]
|
||||
|
||||
# Use API key to authenticate and access protected endpoint
|
||||
api_key_headers = get_api_key_header(api_key._raw_key)
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "data" in response.json()
|
||||
|
||||
def test_api_key_one_time_display_on_creation(
|
||||
self, create_test_user_rbac, tenants_fixture
|
||||
):
|
||||
"""Verify full key only returned on creation, subsequent retrieval shows prefix only."""
|
||||
client = APIClient()
|
||||
|
||||
# Authenticate with JWT to create API key
|
||||
access_token, _ = get_api_tokens(
|
||||
client, create_test_user_rbac.email, TEST_PASSWORD
|
||||
)
|
||||
jwt_headers = get_authorization_header(access_token)
|
||||
|
||||
# Create API key
|
||||
api_key_name = "Test One-Time Key"
|
||||
create_response = client.post(
|
||||
reverse("api-key-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "api-keys",
|
||||
"attributes": {
|
||||
"name": api_key_name,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=jwt_headers,
|
||||
)
|
||||
|
||||
assert create_response.status_code == 201
|
||||
created_data = create_response.json()["data"]
|
||||
api_key_id = created_data["id"]
|
||||
|
||||
# Verify full key is present in creation response
|
||||
assert "api_key" in created_data["attributes"]
|
||||
full_key = created_data["attributes"]["api_key"]
|
||||
assert full_key.startswith("pk_")
|
||||
assert "." in full_key
|
||||
|
||||
# Retrieve the same API key
|
||||
retrieve_response = client.get(
|
||||
reverse("api-key-detail", kwargs={"pk": api_key_id}),
|
||||
headers=jwt_headers,
|
||||
)
|
||||
|
||||
assert retrieve_response.status_code == 200
|
||||
retrieved_data = retrieve_response.json()["data"]
|
||||
|
||||
# Verify full key is NOT present in retrieval response
|
||||
assert "api_key" not in retrieved_data["attributes"]
|
||||
# Only prefix should be visible
|
||||
assert "prefix" in retrieved_data["attributes"]
|
||||
assert retrieved_data["attributes"]["prefix"].startswith("pk_")
|
||||
|
||||
def test_last_used_at_tracking(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""Verify last_used_at timestamp updates on each authentication."""
|
||||
client = APIClient()
|
||||
api_key = api_keys_fixture[0]
|
||||
|
||||
# Verify initially last_used_at is None
|
||||
assert api_key.last_used_at is None
|
||||
|
||||
# Use API key to authenticate
|
||||
api_key_headers = get_api_key_header(api_key._raw_key)
|
||||
first_response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
assert first_response.status_code == 200
|
||||
|
||||
# Reload from database and check last_used_at is set
|
||||
api_key.refresh_from_db()
|
||||
first_used_at = api_key.last_used_at
|
||||
assert first_used_at is not None
|
||||
|
||||
# Use the same key again after a small delay
|
||||
time.sleep(0.1)
|
||||
|
||||
second_response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
assert second_response.status_code == 200
|
||||
|
||||
# Reload and verify last_used_at was updated
|
||||
api_key.refresh_from_db()
|
||||
second_used_at = api_key.last_used_at
|
||||
assert second_used_at is not None
|
||||
assert second_used_at > first_used_at
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestAPIKeyErrors:
|
||||
def test_invalid_api_key_format_missing_separator(
|
||||
self, create_test_user, tenants_fixture
|
||||
):
|
||||
"""Malformed key without . separator."""
|
||||
client = APIClient()
|
||||
|
||||
# Create malformed key without separator
|
||||
malformed_key = "pk_12345678abcdefgh"
|
||||
api_key_headers = get_api_key_header(malformed_key)
|
||||
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert "Invalid API Key." in response.json()["errors"][0]["detail"]
|
||||
|
||||
def test_invalid_api_key_format_malformed(self, create_test_user, tenants_fixture):
|
||||
"""Completely invalid format."""
|
||||
client = APIClient()
|
||||
|
||||
# Various malformed keys
|
||||
malformed_keys = [
|
||||
"invalid_key",
|
||||
"Bearer some_token",
|
||||
"",
|
||||
"pk_.",
|
||||
".encrypted_part",
|
||||
]
|
||||
|
||||
for malformed_key in malformed_keys:
|
||||
api_key_headers = get_api_key_header(malformed_key)
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert "Invalid API Key." in response.json()["errors"][0]["detail"]
|
||||
|
||||
def test_expired_api_key_rejected(self, create_test_user, tenants_fixture):
|
||||
"""Key past expiry date returns 401."""
|
||||
client = APIClient()
|
||||
|
||||
# Create API key with past expiry date
|
||||
expired_key, raw_key = TenantAPIKey.objects.create_api_key(
|
||||
name="Expired Key",
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
entity=create_test_user,
|
||||
expiry_date=datetime.now(timezone.utc) - timedelta(days=1),
|
||||
)
|
||||
|
||||
api_key_headers = get_api_key_header(raw_key)
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert "API Key has already expired." in response.json()["errors"][0]["detail"]
|
||||
|
||||
def test_revoked_api_key_rejected(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""Revoked key returns 401."""
|
||||
client = APIClient()
|
||||
|
||||
# Use the revoked key from fixture
|
||||
revoked_key = api_keys_fixture[2]
|
||||
assert revoked_key.revoked is True
|
||||
|
||||
api_key_headers = get_api_key_header(revoked_key._raw_key)
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert "API Key has been revoked." in response.json()["errors"][0]["detail"]
|
||||
|
||||
def test_non_existent_api_key(self, create_test_user, tenants_fixture):
|
||||
"""Key UUID doesn't exist in database."""
|
||||
client = APIClient()
|
||||
|
||||
# Create a valid-looking key with non-existent UUID
|
||||
crypto = get_crypto()
|
||||
fake_uuid = str(uuid4())
|
||||
fake_expiry = (datetime.now(timezone.utc) + timedelta(days=30)).timestamp()
|
||||
payload = {"_pk": fake_uuid, "_exp": fake_expiry}
|
||||
encrypted_payload = crypto.generate(payload)
|
||||
|
||||
fake_key = f"pk_fakepfx.{encrypted_payload}"
|
||||
api_key_headers = get_api_key_header(fake_key)
|
||||
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert (
|
||||
"No entity matching this api key." in response.json()["errors"][0]["detail"]
|
||||
)
|
||||
|
||||
def test_corrupted_payload(self, create_test_user, tenants_fixture):
|
||||
"""Tampered/corrupted encrypted payload."""
|
||||
client = APIClient()
|
||||
|
||||
# Create key with corrupted encrypted portion
|
||||
corrupted_key = "pk_12345678.corrupted_encrypted_data_here"
|
||||
api_key_headers = get_api_key_header(corrupted_key)
|
||||
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert response.status_code == 401
|
||||
assert "Invalid API Key." in response.json()["errors"][0]["detail"]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestAPIKeyTenantIsolation:
|
||||
def test_api_key_tenant_isolation(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""User in tenant A cannot use API key from tenant B."""
|
||||
client = APIClient()
|
||||
|
||||
# Create a second user in a different tenant
|
||||
second_user = User.objects.create_user(
|
||||
name="second_user",
|
||||
email="second_user@prowler.com",
|
||||
password="Test_password@1",
|
||||
)
|
||||
second_tenant = tenants_fixture[1]
|
||||
Membership.objects.create(user=second_user, tenant=second_tenant)
|
||||
|
||||
# Create and assign role to second_user
|
||||
second_role = Role.objects.create(
|
||||
tenant_id=second_tenant.id,
|
||||
name="Second Tenant Role",
|
||||
unlimited_visibility=True,
|
||||
manage_account=True,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=second_user,
|
||||
role=second_role,
|
||||
tenant_id=second_tenant.id,
|
||||
)
|
||||
|
||||
# Create API key for second user in second tenant
|
||||
second_key, raw_key = TenantAPIKey.objects.create_api_key(
|
||||
name="Second Tenant Key",
|
||||
tenant_id=second_tenant.id,
|
||||
entity=second_user,
|
||||
)
|
||||
|
||||
# First user's API key from first tenant
|
||||
first_key = api_keys_fixture[0]
|
||||
tenants_fixture[0]
|
||||
|
||||
# Verify both keys are from different tenants
|
||||
assert first_key.tenant_id != second_key.tenant_id
|
||||
|
||||
# Each key should only access resources in its own tenant
|
||||
# This is enforced by RLS at the database level
|
||||
first_headers = get_api_key_header(first_key._raw_key)
|
||||
second_headers = get_api_key_header(raw_key)
|
||||
|
||||
# Both should work for their respective tenants
|
||||
first_response = client.get(reverse("provider-list"), headers=first_headers)
|
||||
assert first_response.status_code == 200
|
||||
|
||||
second_response = client.get(reverse("provider-list"), headers=second_headers)
|
||||
assert second_response.status_code == 200
|
||||
|
||||
# Verify tenant context is correct in each response
|
||||
# The responses should contain only data for their respective tenants
|
||||
|
||||
def test_api_key_filters_by_tenant(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""List endpoint only shows keys for current tenant."""
|
||||
client = APIClient()
|
||||
|
||||
# Create JWT token for first tenant
|
||||
access_token, _ = get_api_tokens(client, create_test_user.email, TEST_PASSWORD)
|
||||
jwt_headers = get_authorization_header(access_token)
|
||||
|
||||
# List API keys
|
||||
list_response = client.get(reverse("api-key-list"), headers=jwt_headers)
|
||||
|
||||
assert list_response.status_code == 200
|
||||
keys_data = list_response.json()["data"]
|
||||
|
||||
# Verify all returned keys belong to the current tenant
|
||||
tenants_fixture[0].id
|
||||
for key_data in keys_data:
|
||||
# We can't directly see tenant_id in response, but all keys should be from fixtures
|
||||
# which are created in first tenant
|
||||
assert key_data["type"] == "api-keys"
|
||||
|
||||
# Count should match the number of non-revoked keys in api_keys_fixture for this tenant
|
||||
# api_keys_fixture creates 3 keys (1 normal, 1 with expiry, 1 revoked)
|
||||
assert len(keys_data) == 3
|
||||
|
||||
def test_api_key_revoked_when_user_removed_from_tenant(self, tenants_fixture):
|
||||
"""When user membership is deleted, all user's API keys for that tenant are revoked."""
|
||||
client = APIClient()
|
||||
tenant = tenants_fixture[0]
|
||||
|
||||
# Create a fresh user for this test
|
||||
test_user = User.objects.create_user(
|
||||
name="test_membership_removal",
|
||||
email="membership_removal@prowler.com",
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
|
||||
# Create membership between user and tenant
|
||||
Membership.objects.create(
|
||||
user=test_user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
|
||||
# Create role with manage_account permission
|
||||
role = Role.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
name="Membership Removal Role",
|
||||
unlimited_visibility=True,
|
||||
manage_account=True,
|
||||
)
|
||||
|
||||
# Assign role to user
|
||||
UserRoleRelationship.objects.create(
|
||||
user=test_user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
# Create API key for this user in this tenant
|
||||
api_key, raw_key = TenantAPIKey.objects.create_api_key(
|
||||
name="Test Key for Membership Removal",
|
||||
tenant_id=tenant.id,
|
||||
entity=test_user,
|
||||
)
|
||||
|
||||
# Verify API key works initially
|
||||
api_key_headers = get_api_key_header(raw_key)
|
||||
initial_response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
assert initial_response.status_code == 200
|
||||
|
||||
# Store API key ID for later verification
|
||||
api_key_id = api_key.id
|
||||
|
||||
# Remove user from tenant by deleting membership
|
||||
Membership.objects.filter(user=test_user, tenant=tenant).delete()
|
||||
|
||||
# Reload API key from database
|
||||
api_key.refresh_from_db()
|
||||
|
||||
# Verify API key still exists in database
|
||||
assert TenantAPIKey.objects.filter(id=api_key_id).exists()
|
||||
|
||||
# Verify API key is now revoked
|
||||
assert api_key.revoked is True
|
||||
|
||||
# Verify authentication with this API key now fails with 401
|
||||
auth_response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
assert auth_response.status_code == 401
|
||||
|
||||
# Verify error message indicates revocation
|
||||
response_json = auth_response.json()
|
||||
assert "errors" in response_json
|
||||
error_detail = response_json["errors"][0]["detail"]
|
||||
assert "revoked" in error_detail.lower()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestAPIKeyLifecycle:
|
||||
def test_create_api_key(self, create_test_user_rbac, tenants_fixture):
|
||||
"""Create via POST with name and optional expiry."""
|
||||
client = APIClient()
|
||||
|
||||
# Authenticate with JWT
|
||||
access_token, _ = get_api_tokens(
|
||||
client, create_test_user_rbac.email, TEST_PASSWORD
|
||||
)
|
||||
jwt_headers = get_authorization_header(access_token)
|
||||
|
||||
# Create API key without expiry
|
||||
key_name = "Test Lifecycle Key"
|
||||
create_response = client.post(
|
||||
reverse("api-key-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "api-keys",
|
||||
"attributes": {
|
||||
"name": key_name,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=jwt_headers,
|
||||
)
|
||||
|
||||
assert create_response.status_code == 201
|
||||
created_data = create_response.json()["data"]
|
||||
|
||||
assert created_data["attributes"]["name"] == key_name
|
||||
assert "api_key" in created_data["attributes"]
|
||||
assert "prefix" in created_data["attributes"]
|
||||
assert created_data["attributes"]["revoked"] is False
|
||||
|
||||
# Create API key with expiry
|
||||
future_expiry = (datetime.now(timezone.utc) + timedelta(days=90)).isoformat()
|
||||
create_with_expiry_response = client.post(
|
||||
reverse("api-key-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "api-keys",
|
||||
"attributes": {
|
||||
"name": "Key with Expiry",
|
||||
"expires_at": future_expiry,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=jwt_headers,
|
||||
)
|
||||
|
||||
assert create_with_expiry_response.status_code == 201
|
||||
expiry_data = create_with_expiry_response.json()["data"]
|
||||
assert expiry_data["attributes"]["expires_at"] is not None
|
||||
|
||||
def test_update_api_key_name_only(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""PATCH only allows name changes."""
|
||||
client = APIClient()
|
||||
|
||||
# Authenticate with JWT
|
||||
access_token, _ = get_api_tokens(client, create_test_user.email, TEST_PASSWORD)
|
||||
jwt_headers = get_authorization_header(access_token)
|
||||
|
||||
api_key = api_keys_fixture[0]
|
||||
api_key.name
|
||||
new_name = "Updated API Key Name"
|
||||
|
||||
# Update name
|
||||
update_response = client.patch(
|
||||
reverse("api-key-detail", kwargs={"pk": api_key.id}),
|
||||
data={
|
||||
"data": {
|
||||
"type": "api-keys",
|
||||
"id": str(api_key.id),
|
||||
"attributes": {
|
||||
"name": new_name,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=jwt_headers,
|
||||
)
|
||||
|
||||
assert update_response.status_code == 200
|
||||
updated_data = update_response.json()["data"]
|
||||
assert updated_data["attributes"]["name"] == new_name
|
||||
|
||||
# Verify name was actually updated in database
|
||||
api_key.refresh_from_db()
|
||||
assert api_key.name == new_name
|
||||
|
||||
# Verify other fields remain unchanged
|
||||
assert api_key.prefix == updated_data["attributes"]["prefix"]
|
||||
assert api_key.revoked is False
|
||||
|
||||
def test_delete_api_key(self, create_test_user, tenants_fixture, api_keys_fixture):
|
||||
"""DELETE revokes key (sets revoked=True)."""
|
||||
client = APIClient()
|
||||
|
||||
# Authenticate with JWT
|
||||
access_token, _ = get_api_tokens(client, create_test_user.email, TEST_PASSWORD)
|
||||
jwt_headers = get_authorization_header(access_token)
|
||||
|
||||
api_key = api_keys_fixture[1]
|
||||
api_key_id = api_key.id
|
||||
|
||||
# Revoke API key using the revoke endpoint
|
||||
revoke_response = client.delete(
|
||||
reverse("api-key-revoke", kwargs={"pk": api_key_id}),
|
||||
headers=jwt_headers,
|
||||
)
|
||||
|
||||
assert revoke_response.status_code == 200
|
||||
|
||||
# Verify key still exists but is revoked
|
||||
api_key.refresh_from_db()
|
||||
assert api_key.revoked is True
|
||||
|
||||
# Verify revoked key can no longer authenticate
|
||||
api_key_headers = get_api_key_header(api_key._raw_key)
|
||||
auth_response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
assert auth_response.status_code == 401
|
||||
|
||||
def test_multiple_keys_per_user(self, create_test_user_rbac, tenants_fixture):
|
||||
"""User can have multiple active keys."""
|
||||
client = APIClient()
|
||||
|
||||
# Authenticate with JWT
|
||||
access_token, _ = get_api_tokens(
|
||||
client, create_test_user_rbac.email, TEST_PASSWORD
|
||||
)
|
||||
jwt_headers = get_authorization_header(access_token)
|
||||
|
||||
# Create multiple API keys
|
||||
key_names = ["Key One", "Key Two", "Key Three"]
|
||||
created_keys = []
|
||||
|
||||
for name in key_names:
|
||||
create_response = client.post(
|
||||
reverse("api-key-list"),
|
||||
data={
|
||||
"data": {
|
||||
"type": "api-keys",
|
||||
"attributes": {
|
||||
"name": name,
|
||||
},
|
||||
}
|
||||
},
|
||||
format="vnd.api+json",
|
||||
headers=jwt_headers,
|
||||
)
|
||||
|
||||
assert create_response.status_code == 201
|
||||
created_keys.append(create_response.json()["data"])
|
||||
|
||||
# Verify all keys were created
|
||||
assert len(created_keys) == 3
|
||||
|
||||
# List all keys and verify count
|
||||
list_response = client.get(reverse("api-key-list"), headers=jwt_headers)
|
||||
assert list_response.status_code == 200
|
||||
|
||||
# Should include the 3 new keys plus the ones from api_keys_fixture
|
||||
keys_list = list_response.json()["data"]
|
||||
assert len(keys_list) >= 3
|
||||
|
||||
# Verify each created key can authenticate independently
|
||||
for key_data in created_keys:
|
||||
full_key = key_data["attributes"]["api_key"]
|
||||
api_key_headers = get_api_key_header(full_key)
|
||||
auth_response = client.get(
|
||||
reverse("provider-list"), headers=api_key_headers
|
||||
)
|
||||
assert auth_response.status_code == 200
|
||||
|
||||
def test_api_key_becomes_invalid_when_user_deleted(self, tenants_fixture):
|
||||
"""When user is deleted, API key entity is set to None and authentication fails."""
|
||||
client = APIClient()
|
||||
tenant = tenants_fixture[0]
|
||||
|
||||
# Create a fresh user for this test to avoid affecting other tests
|
||||
test_user = User.objects.create_user(
|
||||
name="test_deletion_user",
|
||||
email="deletion_test@prowler.com",
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
Membership.objects.create(
|
||||
user=test_user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
|
||||
# Create role for the user
|
||||
role = Role.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
name="Deletion Test Role",
|
||||
unlimited_visibility=True,
|
||||
manage_account=True,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=test_user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
# Create API key for this user
|
||||
api_key, raw_key = TenantAPIKey.objects.create_api_key(
|
||||
name="Test Key for Deletion",
|
||||
tenant_id=tenant.id,
|
||||
entity=test_user,
|
||||
)
|
||||
|
||||
# Verify the API key works initially
|
||||
api_key_headers = get_api_key_header(raw_key)
|
||||
initial_response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
assert initial_response.status_code == 200
|
||||
|
||||
# Store the API key ID for later verification
|
||||
api_key_id = api_key.id
|
||||
|
||||
# Delete the user
|
||||
test_user.delete()
|
||||
|
||||
# Reload the API key from database
|
||||
api_key.refresh_from_db()
|
||||
|
||||
# Verify the API key still exists in database (not cascade deleted)
|
||||
assert TenantAPIKey.objects.filter(id=api_key_id).exists()
|
||||
|
||||
# Verify entity field is now None (CASCADE behavior is SET_NULL)
|
||||
assert api_key.entity is None
|
||||
|
||||
# Verify authentication with this API key now fails
|
||||
auth_response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
|
||||
# Must return 401 Unauthorized, not 500 Internal Server Error
|
||||
assert auth_response.status_code == 401, (
|
||||
f"Expected 401 but got {auth_response.status_code}: "
|
||||
f"{auth_response.json()}"
|
||||
)
|
||||
|
||||
# Verify error message is present
|
||||
response_json = auth_response.json()
|
||||
assert "errors" in response_json
|
||||
error_detail = response_json["errors"][0]["detail"]
|
||||
# The error should indicate authentication failed due to invalid/orphaned key
|
||||
assert (
|
||||
"API Key" in error_detail
|
||||
or "Invalid" in error_detail
|
||||
or "entity" in error_detail.lower()
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestCombinedAuthentication:
|
||||
def test_jwt_takes_priority_over_api_key(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""When Bearer token present, JWT is used."""
|
||||
client = APIClient()
|
||||
|
||||
# Get JWT token
|
||||
access_token, _ = get_api_tokens(client, create_test_user.email, TEST_PASSWORD)
|
||||
|
||||
# Create headers with both Bearer (JWT) and API key would conflict
|
||||
# But we'll test that Bearer takes priority by setting Authorization to Bearer
|
||||
jwt_headers = {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
response = client.get(reverse("provider-list"), headers=jwt_headers)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
# The authentication should have used JWT, not API key
|
||||
# We can verify this worked as JWT authentication
|
||||
|
||||
def test_api_key_header_format_validation(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""Verify Authorization: Api-Key <key> format."""
|
||||
client = APIClient()
|
||||
|
||||
api_key = api_keys_fixture[0]
|
||||
|
||||
# Correct format
|
||||
correct_headers = {"Authorization": f"Api-Key {api_key._raw_key}"}
|
||||
correct_response = client.get(reverse("provider-list"), headers=correct_headers)
|
||||
assert correct_response.status_code == 200
|
||||
|
||||
# Wrong format - using Bearer instead of Api-Key
|
||||
wrong_format_headers = {"Authorization": f"Bearer {api_key._raw_key}"}
|
||||
wrong_response = client.get(
|
||||
reverse("provider-list"), headers=wrong_format_headers
|
||||
)
|
||||
# Should fail because it tries to parse as JWT
|
||||
assert wrong_response.status_code == 401
|
||||
|
||||
# Wrong format - missing Api-Key prefix
|
||||
no_prefix_headers = {"Authorization": api_key._raw_key}
|
||||
no_prefix_response = client.get(
|
||||
reverse("provider-list"), headers=no_prefix_headers
|
||||
)
|
||||
assert no_prefix_response.status_code == 401
|
||||
|
||||
def test_concurrent_api_key_usage(
|
||||
self, create_test_user, tenants_fixture, api_keys_fixture
|
||||
):
|
||||
"""Same key can be used multiple times concurrently."""
|
||||
client = APIClient()
|
||||
|
||||
api_key = api_keys_fixture[0]
|
||||
api_key_headers = get_api_key_header(api_key._raw_key)
|
||||
|
||||
# Make multiple concurrent requests with the same key
|
||||
responses = []
|
||||
for _ in range(5):
|
||||
response = client.get(reverse("provider-list"), headers=api_key_headers)
|
||||
responses.append(response)
|
||||
|
||||
# All requests should succeed
|
||||
for response in responses:
|
||||
assert response.status_code == 200
|
||||
|
||||
# Verify last_used_at was updated
|
||||
api_key.refresh_from_db()
|
||||
assert api_key.last_used_at is not None
|
||||
|
||||
|
||||
def get_api_key_header(api_key: str) -> dict:
|
||||
"""Helper to create API key authorization header."""
|
||||
return {"Authorization": f"Api-Key {api_key}"}
|
||||
|
||||
@@ -0,0 +1,152 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from django.conf import settings
|
||||
|
||||
import api.apps as api_apps_module
|
||||
from api.apps import (
|
||||
ApiConfig,
|
||||
PRIVATE_KEY_FILE,
|
||||
PUBLIC_KEY_FILE,
|
||||
SIGNING_KEY_ENV,
|
||||
VERIFYING_KEY_ENV,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def reset_keys_initialized(monkeypatch):
|
||||
"""Ensure per-test clean state for the module-level guard flag."""
|
||||
monkeypatch.setattr(api_apps_module, "_keys_initialized", False, raising=False)
|
||||
|
||||
|
||||
def _stub_keys():
|
||||
return (
|
||||
"""-----BEGIN PRIVATE KEY-----\nPRIVATE\n-----END PRIVATE KEY-----\n""",
|
||||
"""-----BEGIN PUBLIC KEY-----\nPUBLIC\n-----END PUBLIC KEY-----\n""",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_jwt_keys_when_missing(monkeypatch, tmp_path):
|
||||
# Arrange: isolate FS, env, and settings; force generation path
|
||||
monkeypatch.setattr(
|
||||
api_apps_module, "KEYS_DIRECTORY", Path(tmp_path), raising=False
|
||||
)
|
||||
monkeypatch.delenv(SIGNING_KEY_ENV, raising=False)
|
||||
monkeypatch.delenv(VERIFYING_KEY_ENV, raising=False)
|
||||
|
||||
# Work on a copy of SIMPLE_JWT to avoid mutating the global settings dict for other tests
|
||||
monkeypatch.setattr(
|
||||
settings, "SIMPLE_JWT", settings.SIMPLE_JWT.copy(), raising=False
|
||||
)
|
||||
monkeypatch.setattr(settings, "TESTING", False, raising=False)
|
||||
|
||||
# Avoid dependency on the cryptography package
|
||||
monkeypatch.setattr(ApiConfig, "_generate_jwt_keys", staticmethod(_stub_keys))
|
||||
|
||||
config = ApiConfig("api", api_apps_module)
|
||||
|
||||
# Act
|
||||
config._ensure_crypto_keys()
|
||||
|
||||
# Assert: files created with expected content
|
||||
priv_path = Path(tmp_path) / PRIVATE_KEY_FILE
|
||||
pub_path = Path(tmp_path) / PUBLIC_KEY_FILE
|
||||
assert priv_path.is_file()
|
||||
assert pub_path.is_file()
|
||||
assert priv_path.read_text() == _stub_keys()[0]
|
||||
assert pub_path.read_text() == _stub_keys()[1]
|
||||
|
||||
# Env vars and Django settings updated
|
||||
assert os.environ[SIGNING_KEY_ENV] == _stub_keys()[0]
|
||||
assert os.environ[VERIFYING_KEY_ENV] == _stub_keys()[1]
|
||||
assert settings.SIMPLE_JWT["SIGNING_KEY"] == _stub_keys()[0]
|
||||
assert settings.SIMPLE_JWT["VERIFYING_KEY"] == _stub_keys()[1]
|
||||
|
||||
|
||||
def test_ensure_crypto_keys_are_idempotent_within_process(monkeypatch, tmp_path):
|
||||
# Arrange
|
||||
monkeypatch.setattr(
|
||||
api_apps_module, "KEYS_DIRECTORY", Path(tmp_path), raising=False
|
||||
)
|
||||
monkeypatch.delenv(SIGNING_KEY_ENV, raising=False)
|
||||
monkeypatch.delenv(VERIFYING_KEY_ENV, raising=False)
|
||||
monkeypatch.setattr(
|
||||
settings, "SIMPLE_JWT", settings.SIMPLE_JWT.copy(), raising=False
|
||||
)
|
||||
monkeypatch.setattr(settings, "TESTING", False, raising=False)
|
||||
|
||||
mock_generate = MagicMock(side_effect=_stub_keys)
|
||||
monkeypatch.setattr(ApiConfig, "_generate_jwt_keys", staticmethod(mock_generate))
|
||||
|
||||
config = ApiConfig("api", api_apps_module)
|
||||
|
||||
# Act: first call should generate, second should be a no-op (guard flag)
|
||||
config._ensure_crypto_keys()
|
||||
config._ensure_crypto_keys()
|
||||
|
||||
# Assert: generation occurred exactly once
|
||||
assert mock_generate.call_count == 1
|
||||
|
||||
|
||||
def test_ensure_jwt_keys_uses_existing_files(monkeypatch, tmp_path):
|
||||
# Arrange: pre-create key files
|
||||
monkeypatch.setattr(
|
||||
api_apps_module, "KEYS_DIRECTORY", Path(tmp_path), raising=False
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
settings, "SIMPLE_JWT", settings.SIMPLE_JWT.copy(), raising=False
|
||||
)
|
||||
|
||||
existing_private, existing_public = _stub_keys()
|
||||
|
||||
(Path(tmp_path) / PRIVATE_KEY_FILE).write_text(existing_private)
|
||||
(Path(tmp_path) / PUBLIC_KEY_FILE).write_text(existing_public)
|
||||
|
||||
# If generation were called, fail the test
|
||||
def _fail_generate():
|
||||
raise AssertionError("_generate_jwt_keys should not be called when files exist")
|
||||
|
||||
monkeypatch.setattr(ApiConfig, "_generate_jwt_keys", staticmethod(_fail_generate))
|
||||
|
||||
config = ApiConfig("api", api_apps_module)
|
||||
|
||||
# Act: call the lower-level method directly to set env/settings from files
|
||||
config._ensure_jwt_keys()
|
||||
|
||||
# Assert
|
||||
# _read_key_file() strips trailing newlines; environment/settings should reflect stripped content
|
||||
assert os.environ[SIGNING_KEY_ENV] == existing_private.strip()
|
||||
assert os.environ[VERIFYING_KEY_ENV] == existing_public.strip()
|
||||
assert settings.SIMPLE_JWT["SIGNING_KEY"] == existing_private.strip()
|
||||
assert settings.SIMPLE_JWT["VERIFYING_KEY"] == existing_public.strip()
|
||||
|
||||
|
||||
def test_ensure_crypto_keys_skips_when_env_vars(monkeypatch, tmp_path):
|
||||
# Arrange: put values in env so the orchestrator doesn't generate
|
||||
monkeypatch.setattr(
|
||||
api_apps_module, "KEYS_DIRECTORY", Path(tmp_path), raising=False
|
||||
)
|
||||
monkeypatch.setenv(SIGNING_KEY_ENV, "ENV-PRIVATE")
|
||||
monkeypatch.setenv(VERIFYING_KEY_ENV, "ENV-PUBLIC")
|
||||
monkeypatch.setattr(
|
||||
settings, "SIMPLE_JWT", settings.SIMPLE_JWT.copy(), raising=False
|
||||
)
|
||||
monkeypatch.setattr(settings, "TESTING", False, raising=False)
|
||||
|
||||
called = {"ensure": False}
|
||||
|
||||
def _track_call():
|
||||
called["ensure"] = True
|
||||
return _stub_keys()
|
||||
|
||||
monkeypatch.setattr(ApiConfig, "_generate_jwt_keys", staticmethod(_track_call))
|
||||
|
||||
config = ApiConfig("api", api_apps_module)
|
||||
|
||||
# Act
|
||||
config._ensure_crypto_keys()
|
||||
|
||||
# Assert: orchestrator did not trigger generation when env present
|
||||
assert called["ensure"] is False
|
||||
@@ -239,6 +239,7 @@ class TestCompliance:
|
||||
Framework="Framework 1",
|
||||
Version="1.0",
|
||||
Description="Description of compliance1",
|
||||
Name="Compliance 1",
|
||||
)
|
||||
prowler_compliance = {"aws": {"compliance1": compliance1}}
|
||||
|
||||
@@ -248,6 +249,7 @@ class TestCompliance:
|
||||
"aws": {
|
||||
"compliance1": {
|
||||
"framework": "Framework 1",
|
||||
"name": "Compliance 1",
|
||||
"version": "1.0",
|
||||
"provider": "aws",
|
||||
"description": "Description of compliance1",
|
||||
|
||||
@@ -11,6 +11,7 @@ from api.db_utils import (
|
||||
batch_delete,
|
||||
create_objects_in_batches,
|
||||
enum_to_choices,
|
||||
generate_api_key_prefix,
|
||||
generate_random_token,
|
||||
one_week_from_now,
|
||||
update_objects_in_batches,
|
||||
@@ -313,3 +314,28 @@ class TestUpdateObjectsInBatches:
|
||||
|
||||
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
|
||||
assert qs.count() == total
|
||||
|
||||
|
||||
class TestGenerateApiKeyPrefix:
|
||||
def test_prefix_format(self):
|
||||
"""Test that generated prefix starts with 'pk_'."""
|
||||
prefix = generate_api_key_prefix()
|
||||
assert prefix.startswith("pk_")
|
||||
|
||||
def test_prefix_length(self):
|
||||
"""Test that prefix has correct length (pk_ + 8 random chars = 11)."""
|
||||
prefix = generate_api_key_prefix()
|
||||
assert len(prefix) == 11
|
||||
|
||||
def test_prefix_uniqueness(self):
|
||||
"""Test that multiple generations produce unique prefixes."""
|
||||
prefixes = {generate_api_key_prefix() for _ in range(100)}
|
||||
assert len(prefixes) == 100
|
||||
|
||||
def test_prefix_character_set(self):
|
||||
"""Test that random part uses only allowed characters."""
|
||||
allowed_chars = "23456789ABCDEFGHJKMNPQRSTVWXYZ"
|
||||
for _ in range(50):
|
||||
prefix = generate_api_key_prefix()
|
||||
random_part = prefix[3:] # Strip 'pk_'
|
||||
assert all(char in allowed_chars for char in random_part)
|
||||
|
||||
@@ -24,6 +24,7 @@ def test_api_logging_middleware_logging(mock_logger):
|
||||
mock_extract_auth_info.return_value = {
|
||||
"user_id": "user123",
|
||||
"tenant_id": "tenant456",
|
||||
"api_key_prefix": "pk_test",
|
||||
}
|
||||
|
||||
with patch("api.middleware.logging.getLogger") as mock_get_logger:
|
||||
@@ -44,6 +45,7 @@ def test_api_logging_middleware_logging(mock_logger):
|
||||
expected_extra = {
|
||||
"user_id": "user123",
|
||||
"tenant_id": "tenant456",
|
||||
"api_key_prefix": "pk_test",
|
||||
"method": "GET",
|
||||
"path": "/test-path",
|
||||
"query_params": {"param1": "value1", "param2": "value2"},
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import json
|
||||
from unittest.mock import ANY, Mock, patch
|
||||
|
||||
import pytest
|
||||
@@ -151,6 +152,221 @@ class TestUserViewSet:
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert response.json()["data"]["attributes"]["email"] == "rbac_limited@rbac.com"
|
||||
|
||||
def test_me_shows_own_roles_and_memberships_without_manage_account(
|
||||
self, authenticated_client_no_permissions_rbac
|
||||
):
|
||||
response = authenticated_client_no_permissions_rbac.get(reverse("user-me"))
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
|
||||
rels = response.json()["data"]["relationships"]
|
||||
|
||||
# Self should see own roles and memberships even without manage_account
|
||||
assert isinstance(rels["roles"]["data"], list)
|
||||
assert rels["memberships"]["meta"]["count"] == 1
|
||||
|
||||
def test_me_shows_roles_and_memberships_with_manage_account(
|
||||
self, authenticated_client_rbac
|
||||
):
|
||||
response = authenticated_client_rbac.get(reverse("user-me"))
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
|
||||
rels = response.json()["data"]["relationships"]
|
||||
|
||||
# Roles should have data when manage_account is True
|
||||
assert len(rels["roles"]["data"]) > 0
|
||||
|
||||
# Memberships should be present and count > 0
|
||||
assert rels["memberships"]["meta"]["count"] > 0
|
||||
|
||||
def test_me_include_roles_and_memberships_included_block(
|
||||
self, authenticated_client_rbac
|
||||
):
|
||||
# Request current user info including roles and memberships
|
||||
response = authenticated_client_rbac.get(
|
||||
reverse("user-me"), {"include": "roles,memberships"}
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
payload = response.json()
|
||||
|
||||
# Included must contain memberships corresponding to relationships data
|
||||
rel_memberships = payload["data"]["relationships"]["memberships"]
|
||||
ids_in_relationship = {item["id"] for item in rel_memberships["data"]}
|
||||
|
||||
included = payload["included"]
|
||||
included_membership_ids = {
|
||||
item["id"] for item in included if item["type"] == "memberships"
|
||||
}
|
||||
|
||||
# If there are memberships in relationships, they must be present in included
|
||||
if ids_in_relationship:
|
||||
assert ids_in_relationship.issubset(included_membership_ids)
|
||||
else:
|
||||
# At minimum, included should contain the user's membership when requested
|
||||
# (count should align with meta count)
|
||||
assert rel_memberships["meta"]["count"] == len(included_membership_ids)
|
||||
|
||||
def test_list_users_with_manage_account_only_forbidden(
|
||||
self, authenticated_client_rbac_manage_account
|
||||
):
|
||||
response = authenticated_client_rbac_manage_account.get(reverse("user-list"))
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
def test_retrieve_other_user_with_manage_account_only_forbidden(
|
||||
self, authenticated_client_rbac_manage_account, create_test_user
|
||||
):
|
||||
response = authenticated_client_rbac_manage_account.get(
|
||||
reverse("user-detail", kwargs={"pk": create_test_user.id})
|
||||
)
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
def test_list_users_with_manage_users_only_hides_relationships(
|
||||
self, authenticated_client_rbac_manage_users_only
|
||||
):
|
||||
# Ensure there is at least one other user in the same tenant
|
||||
mu_user = authenticated_client_rbac_manage_users_only.user
|
||||
mu_membership = Membership.objects.filter(user=mu_user).first()
|
||||
tenant = mu_membership.tenant
|
||||
|
||||
other_user = User.objects.create_user(
|
||||
name="other_in_tenant",
|
||||
email="other_in_tenant@rbac.com",
|
||||
password="Password123@",
|
||||
)
|
||||
Membership.objects.create(user=other_user, tenant=tenant)
|
||||
|
||||
response = authenticated_client_rbac_manage_users_only.get(reverse("user-list"))
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()["data"]
|
||||
assert isinstance(data, list)
|
||||
|
||||
current_user_id = str(mu_user.id)
|
||||
assert any(item["id"] == current_user_id for item in data)
|
||||
|
||||
for item in data:
|
||||
rels = item["relationships"]
|
||||
if item["id"] == current_user_id:
|
||||
# Self should see own relationships
|
||||
assert isinstance(rels["roles"]["data"], list)
|
||||
assert rels["memberships"]["meta"].get("count", 0) >= 1
|
||||
else:
|
||||
# Others should be hidden without manage_account
|
||||
assert rels["roles"]["data"] == []
|
||||
assert rels["memberships"]["data"] == []
|
||||
assert rels["memberships"]["meta"]["count"] == 0
|
||||
|
||||
def test_include_roles_hidden_without_manage_account(
|
||||
self, authenticated_client_rbac_manage_users_only
|
||||
):
|
||||
# Arrange: ensure another user in the same tenant with its own role
|
||||
mu_user = authenticated_client_rbac_manage_users_only.user
|
||||
mu_membership = Membership.objects.filter(user=mu_user).first()
|
||||
tenant = mu_membership.tenant
|
||||
|
||||
other_user = User.objects.create_user(
|
||||
name="other_in_tenant_inc",
|
||||
email="other_in_tenant_inc@rbac.com",
|
||||
password="Password123@",
|
||||
)
|
||||
Membership.objects.create(user=other_user, tenant=tenant)
|
||||
other_role = Role.objects.create(
|
||||
name="other_inc_role",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=other_user, role=other_role, tenant_id=tenant.id
|
||||
)
|
||||
|
||||
response = authenticated_client_rbac_manage_users_only.get(
|
||||
reverse("user-list"), {"include": "roles"}
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
payload = response.json()
|
||||
|
||||
# Assert: included must not contain the other user's role
|
||||
included = payload.get("included", [])
|
||||
included_role_ids = {
|
||||
item["id"] for item in included if item.get("type") == "roles"
|
||||
}
|
||||
assert str(other_role.id) not in included_role_ids
|
||||
|
||||
# Relationships for other user should be empty
|
||||
for item in payload["data"]:
|
||||
if item["id"] == str(other_user.id):
|
||||
rels = item["relationships"]
|
||||
assert rels["roles"]["data"] == []
|
||||
|
||||
def test_include_roles_visible_with_manage_account(
|
||||
self, authenticated_client_rbac, tenants_fixture
|
||||
):
|
||||
# Arrange: another user in tenant[0] with its role
|
||||
tenant = tenants_fixture[0]
|
||||
other_user = User.objects.create_user(
|
||||
name="other_with_role",
|
||||
email="other_with_role@rbac.com",
|
||||
password="Password123@",
|
||||
)
|
||||
Membership.objects.create(user=other_user, tenant=tenant)
|
||||
other_role = Role.objects.create(
|
||||
name="other_visible_role",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=other_user, role=other_role, tenant_id=tenant.id
|
||||
)
|
||||
|
||||
response = authenticated_client_rbac.get(
|
||||
reverse("user-list"), {"include": "roles"}
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
payload = response.json()
|
||||
|
||||
# Assert: included must contain the other user's role
|
||||
included = payload.get("included", [])
|
||||
included_role_ids = {
|
||||
item["id"] for item in included if item.get("type") == "roles"
|
||||
}
|
||||
assert str(other_role.id) in included_role_ids
|
||||
|
||||
def test_retrieve_user_with_manage_users_only_hides_relationships(
|
||||
self, authenticated_client_rbac_manage_users_only
|
||||
):
|
||||
# Create a target user in the same tenant to ensure visibility
|
||||
mu_user = authenticated_client_rbac_manage_users_only.user
|
||||
mu_membership = Membership.objects.filter(user=mu_user).first()
|
||||
tenant = mu_membership.tenant
|
||||
|
||||
target_user = User.objects.create_user(
|
||||
name="target_same_tenant",
|
||||
email="target_same_tenant@rbac.com",
|
||||
password="Password123@",
|
||||
)
|
||||
Membership.objects.create(user=target_user, tenant=tenant)
|
||||
|
||||
response = authenticated_client_rbac_manage_users_only.get(
|
||||
reverse("user-detail", kwargs={"pk": target_user.id})
|
||||
)
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
rels = response.json()["data"]["relationships"]
|
||||
assert rels["roles"]["data"] == []
|
||||
assert rels["memberships"]["data"] == []
|
||||
assert rels["memberships"]["meta"]["count"] == 0
|
||||
|
||||
def test_list_users_with_all_permissions_shows_relationships(
|
||||
self, authenticated_client_rbac
|
||||
):
|
||||
response = authenticated_client_rbac.get(reverse("user-list"))
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()["data"]
|
||||
assert isinstance(data, list)
|
||||
|
||||
rels = data[0]["relationships"]
|
||||
assert len(rels["roles"]["data"]) >= 0
|
||||
assert rels["memberships"]["meta"]["count"] >= 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestProviderViewSet:
|
||||
@@ -494,3 +710,123 @@ class TestLimitedVisibility:
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestRolePermissions:
|
||||
def test_role_create_with_manage_account_only_allowed(
|
||||
self, authenticated_client_rbac_manage_account
|
||||
):
|
||||
data = {
|
||||
"data": {
|
||||
"type": "roles",
|
||||
"attributes": {
|
||||
"name": "Role Manage Account Only",
|
||||
"manage_users": "false",
|
||||
"manage_account": "true",
|
||||
"manage_providers": "false",
|
||||
"manage_scans": "false",
|
||||
"unlimited_visibility": "false",
|
||||
},
|
||||
"relationships": {"provider_groups": {"data": []}},
|
||||
}
|
||||
}
|
||||
response = authenticated_client_rbac_manage_account.post(
|
||||
reverse("role-list"),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_201_CREATED
|
||||
|
||||
def test_role_create_with_manage_users_only_forbidden(
|
||||
self, authenticated_client_rbac_manage_users_only
|
||||
):
|
||||
data = {
|
||||
"data": {
|
||||
"type": "roles",
|
||||
"attributes": {
|
||||
"name": "Role Manage Users Only",
|
||||
"manage_users": "true",
|
||||
"manage_account": "false",
|
||||
"manage_providers": "false",
|
||||
"manage_scans": "false",
|
||||
"unlimited_visibility": "false",
|
||||
},
|
||||
"relationships": {"provider_groups": {"data": []}},
|
||||
}
|
||||
}
|
||||
response = authenticated_client_rbac_manage_users_only.post(
|
||||
reverse("role-list"),
|
||||
data=json.dumps(data),
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestUserRoleLinkPermissions:
|
||||
def test_link_user_roles_with_manage_account_only_allowed(
|
||||
self, authenticated_client_rbac_manage_account
|
||||
):
|
||||
# Arrange: create a second user in the same tenant as the manage_account user
|
||||
ma_user = authenticated_client_rbac_manage_account.user
|
||||
ma_membership = Membership.objects.filter(user=ma_user).first()
|
||||
tenant = ma_membership.tenant
|
||||
|
||||
user2 = User.objects.create_user(
|
||||
name="target_user",
|
||||
email="target_user_ma@rbac.com",
|
||||
password="Password123@",
|
||||
)
|
||||
Membership.objects.create(user=user2, tenant=tenant)
|
||||
|
||||
# Create a role in the same tenant
|
||||
role = Role.objects.create(
|
||||
name="linkable_role",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
)
|
||||
|
||||
data = {"data": [{"type": "roles", "id": str(role.id)}]}
|
||||
|
||||
# Act
|
||||
response = authenticated_client_rbac_manage_account.post(
|
||||
reverse("user-roles-relationship", kwargs={"pk": user2.id}),
|
||||
data=data,
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert response.status_code == status.HTTP_204_NO_CONTENT
|
||||
|
||||
def test_link_user_roles_with_manage_users_only_forbidden(
|
||||
self, authenticated_client_rbac_manage_users_only
|
||||
):
|
||||
mu_user = authenticated_client_rbac_manage_users_only.user
|
||||
mu_membership = Membership.objects.filter(user=mu_user).first()
|
||||
tenant = mu_membership.tenant
|
||||
|
||||
user2 = User.objects.create_user(
|
||||
name="target_user2",
|
||||
email="target_user_mu@rbac.com",
|
||||
password="Password123@",
|
||||
)
|
||||
Membership.objects.create(user=user2, tenant=tenant)
|
||||
|
||||
role = Role.objects.create(
|
||||
name="linkable_role_mu",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
)
|
||||
|
||||
data = {"data": [{"type": "roles", "id": str(role.id)}]}
|
||||
|
||||
response = authenticated_client_rbac_manage_users_only.post(
|
||||
reverse("user-roles-relationship", kwargs={"pk": user2.id}),
|
||||
data=data,
|
||||
content_type="application/vnd.api+json",
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
@@ -15,6 +15,7 @@ from rest_framework_simplejwt.exceptions import TokenError
|
||||
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
|
||||
from rest_framework_simplejwt.tokens import RefreshToken
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.exceptions import ConflictException
|
||||
from api.models import (
|
||||
Finding,
|
||||
@@ -38,6 +39,7 @@ from api.models import (
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
@@ -259,8 +261,15 @@ class UserSerializer(BaseSerializerV1):
|
||||
Serializer for the User model.
|
||||
"""
|
||||
|
||||
memberships = serializers.ResourceRelatedField(many=True, read_only=True)
|
||||
roles = serializers.ResourceRelatedField(many=True, read_only=True)
|
||||
# We use SerializerMethodResourceRelatedField so includes (e.g. ?include=roles)
|
||||
# respect RBAC and do not leak relationships of other users when the requester
|
||||
# lacks manage_account. The visibility logic lives in get_roles/get_memberships.
|
||||
memberships = SerializerMethodResourceRelatedField(
|
||||
many=True, read_only=True, source="memberships", method_name="get_memberships"
|
||||
)
|
||||
roles = SerializerMethodResourceRelatedField(
|
||||
many=True, read_only=True, source="roles", method_name="get_roles"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
@@ -278,9 +287,35 @@ class UserSerializer(BaseSerializerV1):
|
||||
}
|
||||
|
||||
included_serializers = {
|
||||
"roles": "api.v1.serializers.RoleSerializer",
|
||||
"roles": "api.v1.serializers.RoleIncludeSerializer",
|
||||
"memberships": "api.v1.serializers.MembershipIncludeSerializer",
|
||||
}
|
||||
|
||||
def _can_view_relationships(self, instance) -> bool:
|
||||
"""Allow self to view own relationships. Require manage_account to view others."""
|
||||
role = self.context.get("role")
|
||||
request = self.context.get("request")
|
||||
is_self = bool(
|
||||
request
|
||||
and getattr(request, "user", None)
|
||||
and getattr(instance, "id", None) == request.user.id
|
||||
)
|
||||
return is_self or (role and role.manage_account)
|
||||
|
||||
def get_roles(self, instance):
|
||||
return (
|
||||
instance.roles.all()
|
||||
if self._can_view_relationships(instance)
|
||||
else Role.objects.none()
|
||||
)
|
||||
|
||||
def get_memberships(self, instance):
|
||||
return (
|
||||
instance.memberships.all()
|
||||
if self._can_view_relationships(instance)
|
||||
else Membership.objects.none()
|
||||
)
|
||||
|
||||
|
||||
class UserCreateSerializer(BaseWriteSerializer):
|
||||
password = serializers.CharField(write_only=True)
|
||||
@@ -388,6 +423,34 @@ class UserRoleRelationshipSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
roles = Role.objects.filter(id__in=role_ids)
|
||||
tenant_id = self.context.get("tenant_id")
|
||||
|
||||
# Safeguard: A tenant must always have at least one user with MANAGE_ACCOUNT.
|
||||
# If the target roles do NOT include MANAGE_ACCOUNT, and the current user is
|
||||
# the only one in the tenant with MANAGE_ACCOUNT, block the update.
|
||||
target_includes_manage_account = roles.filter(manage_account=True).exists()
|
||||
if not target_includes_manage_account:
|
||||
# Check if any other user has MANAGE_ACCOUNT
|
||||
other_users_have_manage_account = (
|
||||
UserRoleRelationship.objects.filter(
|
||||
tenant_id=tenant_id, role__manage_account=True
|
||||
)
|
||||
.exclude(user_id=instance.id)
|
||||
.exists()
|
||||
)
|
||||
|
||||
# Check if the current user has MANAGE_ACCOUNT
|
||||
instance_has_manage_account = instance.roles.filter(
|
||||
tenant_id=tenant_id, manage_account=True
|
||||
).exists()
|
||||
|
||||
# If the current user is the last holder of MANAGE_ACCOUNT, prevent removal
|
||||
if instance_has_manage_account and not other_users_have_manage_account:
|
||||
raise serializers.ValidationError(
|
||||
{
|
||||
"roles": "At least one user in the tenant must retain MANAGE_ACCOUNT. "
|
||||
"Assign MANAGE_ACCOUNT to another user before removing it here."
|
||||
}
|
||||
)
|
||||
|
||||
instance.roles.clear()
|
||||
new_relationships = [
|
||||
UserRoleRelationship(user=instance, role=r, tenant_id=tenant_id)
|
||||
@@ -502,6 +565,12 @@ class TenantSerializer(BaseSerializerV1):
|
||||
fields = ["id", "name", "memberships"]
|
||||
|
||||
|
||||
class TenantIncludeSerializer(BaseSerializerV1):
|
||||
class Meta:
|
||||
model = Tenant
|
||||
fields = ["id", "name"]
|
||||
|
||||
|
||||
# Memberships
|
||||
|
||||
|
||||
@@ -523,6 +592,29 @@ class MembershipSerializer(serializers.ModelSerializer):
|
||||
fields = ["id", "user", "tenant", "role", "date_joined"]
|
||||
|
||||
|
||||
class MembershipIncludeSerializer(serializers.ModelSerializer):
|
||||
"""
|
||||
Include-oriented Membership serializer that enables including tenant objects with names
|
||||
without altering the base MembershipSerializer behavior.
|
||||
"""
|
||||
|
||||
role = MemberRoleEnumSerializerField()
|
||||
user = serializers.ResourceRelatedField(read_only=True)
|
||||
tenant = SerializerMethodResourceRelatedField(read_only=True, source="tenant")
|
||||
|
||||
class Meta:
|
||||
model = Membership
|
||||
fields = ["id", "user", "tenant", "role", "date_joined"]
|
||||
|
||||
included_serializers = {"tenant": "api.v1.serializers.TenantIncludeSerializer"}
|
||||
|
||||
def get_tenant(self, instance):
|
||||
try:
|
||||
return Tenant.objects.using(MainRouter.admin_db).get(id=instance.tenant_id)
|
||||
except Tenant.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
# Provider Groups
|
||||
class ProviderGroupSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
providers = serializers.ResourceRelatedField(
|
||||
@@ -817,6 +909,17 @@ class ProviderCreateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"uid",
|
||||
# "scanner_args"
|
||||
]
|
||||
extra_kwargs = {
|
||||
"alias": {
|
||||
"help_text": "Human readable name to identify the provider, e.g. 'Production AWS Account', 'Dev Environment'",
|
||||
},
|
||||
"provider": {
|
||||
"help_text": "Type of provider to create.",
|
||||
},
|
||||
"uid": {
|
||||
"help_text": "Unique identifier for the provider, set by the provider, e.g. AWS account ID, Azure subscription ID, GCP project ID, etc.",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class ProviderUpdateSerializer(BaseWriteSerializer):
|
||||
@@ -831,6 +934,11 @@ class ProviderUpdateSerializer(BaseWriteSerializer):
|
||||
"alias",
|
||||
# "scanner_args"
|
||||
]
|
||||
extra_kwargs = {
|
||||
"alias": {
|
||||
"help_text": "Human readable name to identify the provider, e.g. 'Production AWS Account', 'Dev Environment'",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Scans
|
||||
@@ -1678,6 +1786,37 @@ class RoleUpdateSerializer(RoleSerializer):
|
||||
|
||||
if "users" in validated_data:
|
||||
users = validated_data.pop("users")
|
||||
# Prevent a user from removing their own role assignment via Role update
|
||||
request = self.context.get("request")
|
||||
if request and getattr(request, "user", None):
|
||||
request_user = request.user
|
||||
is_currently_assigned = instance.users.filter(
|
||||
id=request_user.id
|
||||
).exists()
|
||||
will_be_assigned = any(u.id == request_user.id for u in users)
|
||||
if is_currently_assigned and not will_be_assigned:
|
||||
raise serializers.ValidationError(
|
||||
{"users": "Users cannot remove their own role."}
|
||||
)
|
||||
|
||||
# Safeguard MANAGE_ACCOUNT coverage when updating users of this role
|
||||
if instance.manage_account:
|
||||
# Existing MANAGE_ACCOUNT assignments on other roles within the tenant
|
||||
other_ma_exists = (
|
||||
UserRoleRelationship.objects.filter(
|
||||
tenant_id=tenant_id, role__manage_account=True
|
||||
)
|
||||
.exclude(role_id=instance.id)
|
||||
.exists()
|
||||
)
|
||||
|
||||
if not other_ma_exists and len(users) == 0:
|
||||
raise serializers.ValidationError(
|
||||
{
|
||||
"users": "At least one user in the tenant must retain MANAGE_ACCOUNT. "
|
||||
"Assign this MANAGE_ACCOUNT role to at least one user or ensure another user has it."
|
||||
}
|
||||
)
|
||||
instance.users.clear()
|
||||
through_model_instances = [
|
||||
UserRoleRelationship(
|
||||
@@ -1692,6 +1831,37 @@ class RoleUpdateSerializer(RoleSerializer):
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class RoleIncludeSerializer(RLSSerializer):
|
||||
permission_state = serializers.SerializerMethodField()
|
||||
|
||||
def get_permission_state(self, obj) -> str:
|
||||
return obj.permission_state
|
||||
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"manage_users",
|
||||
"manage_account",
|
||||
# Disable for the first release
|
||||
# "manage_billing",
|
||||
# /Disable for the first release
|
||||
"manage_integrations",
|
||||
"manage_providers",
|
||||
"manage_scans",
|
||||
"permission_state",
|
||||
"unlimited_visibility",
|
||||
"inserted_at",
|
||||
"updated_at",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"updated_at": {"read_only": True},
|
||||
}
|
||||
|
||||
|
||||
class ProviderGroupResourceIdentifierSerializer(serializers.Serializer):
|
||||
resource_type = serializers.CharField(source="type")
|
||||
id = serializers.UUIDField()
|
||||
@@ -1806,6 +1976,7 @@ class ComplianceOverviewDetailSerializer(serializers.Serializer):
|
||||
|
||||
class ComplianceOverviewAttributesSerializer(serializers.Serializer):
|
||||
id = serializers.CharField()
|
||||
compliance_name = serializers.CharField()
|
||||
framework_description = serializers.CharField()
|
||||
name = serializers.CharField()
|
||||
framework = serializers.CharField()
|
||||
@@ -2581,3 +2752,103 @@ class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
|
||||
instance.api_key_decoded = api_key
|
||||
instance.save()
|
||||
return instance
|
||||
|
||||
|
||||
# API Keys
|
||||
|
||||
|
||||
class TenantApiKeySerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the TenantApiKey model.
|
||||
"""
|
||||
|
||||
# Map database field names to API field names for consistency
|
||||
expires_at = serializers.DateTimeField(source="expiry_date", read_only=True)
|
||||
inserted_at = serializers.DateTimeField(source="created", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"expires_at",
|
||||
"revoked",
|
||||
"inserted_at",
|
||||
"last_used_at",
|
||||
"entity",
|
||||
]
|
||||
|
||||
|
||||
class TenantApiKeyCreateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"""Serializer for creating new API keys."""
|
||||
|
||||
# Map database field names to API field names for consistency
|
||||
expires_at = serializers.DateTimeField(source="expiry_date", required=False)
|
||||
inserted_at = serializers.DateTimeField(source="created", read_only=True)
|
||||
api_key = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"expires_at",
|
||||
"revoked",
|
||||
"entity",
|
||||
"inserted_at",
|
||||
"last_used_at",
|
||||
"api_key",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"prefix": {"read_only": True},
|
||||
"revoked": {"read_only": True},
|
||||
"entity": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"last_used_at": {"read_only": True},
|
||||
"api_key": {"read_only": True},
|
||||
}
|
||||
|
||||
def get_api_key(self, obj):
|
||||
"""Return the raw API key if it was stored during creation."""
|
||||
return getattr(obj, "_raw_api_key", None)
|
||||
|
||||
def create(self, validated_data):
|
||||
instance, raw_api_key = TenantAPIKey.objects.create_api_key(
|
||||
**validated_data,
|
||||
tenant_id=self.context.get("tenant_id"),
|
||||
entity=self.context.get("request").user,
|
||||
)
|
||||
# Store the raw API key temporarily on the instance for the serializer
|
||||
instance._raw_api_key = raw_api_key
|
||||
return instance
|
||||
|
||||
|
||||
class TenantApiKeyUpdateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"""Serializer for updating API keys - only allows changing the name."""
|
||||
|
||||
# Map database field names to API field names for consistency
|
||||
expires_at = serializers.DateTimeField(source="expiry_date", read_only=True)
|
||||
inserted_at = serializers.DateTimeField(source="created", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"expires_at",
|
||||
"entity",
|
||||
"inserted_at",
|
||||
"last_used_at",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"prefix": {"read_only": True},
|
||||
"entity": {"read_only": True},
|
||||
"expires_at": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"last_used_at": {"read_only": True},
|
||||
}
|
||||
|
||||
@@ -39,6 +39,7 @@ from api.v1.views import (
|
||||
TenantViewSet,
|
||||
UserRoleRelationshipView,
|
||||
UserViewSet,
|
||||
TenantApiKeyViewSet,
|
||||
)
|
||||
|
||||
router = routers.DefaultRouter(trailing_slash=False)
|
||||
@@ -65,6 +66,7 @@ router.register(
|
||||
LighthouseConfigViewSet,
|
||||
basename="lighthouseconfiguration",
|
||||
)
|
||||
router.register(r"api-keys", TenantApiKeyViewSet, basename="api-key")
|
||||
|
||||
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
|
||||
tenants_router.register(
|
||||
|
||||
@@ -95,6 +95,7 @@ from api.filters import (
|
||||
ScanSummarySeverityFilter,
|
||||
ServiceOverviewFilter,
|
||||
TaskFilter,
|
||||
TenantApiKeyFilter,
|
||||
TenantFilter,
|
||||
UserFilter,
|
||||
)
|
||||
@@ -124,6 +125,7 @@ from api.models import (
|
||||
SeverityChoices,
|
||||
StateChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
@@ -189,6 +191,9 @@ from api.v1.serializers import (
|
||||
ScanUpdateSerializer,
|
||||
ScheduleDailyCreateSerializer,
|
||||
TaskSerializer,
|
||||
TenantApiKeyCreateSerializer,
|
||||
TenantApiKeySerializer,
|
||||
TenantApiKeyUpdateSerializer,
|
||||
TenantSerializer,
|
||||
TokenRefreshSerializer,
|
||||
TokenSerializer,
|
||||
@@ -300,7 +305,7 @@ class SchemaView(SpectacularAPIView):
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
spectacular_settings.TITLE = "Prowler API"
|
||||
spectacular_settings.VERSION = "1.13.0"
|
||||
spectacular_settings.VERSION = "1.14.0"
|
||||
spectacular_settings.DESCRIPTION = (
|
||||
"Prowler API specification.\n\nThis file is auto-generated."
|
||||
)
|
||||
@@ -387,6 +392,11 @@ class SchemaView(SpectacularAPIView):
|
||||
"description": "Endpoints for Single Sign-On authentication management via SAML for seamless user "
|
||||
"authentication.",
|
||||
},
|
||||
{
|
||||
"name": "API Keys",
|
||||
"description": "Endpoints for API keys management. These can be used as an alternative to JWT "
|
||||
"authorization.",
|
||||
},
|
||||
]
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
@@ -768,11 +778,13 @@ class UserViewSet(BaseUserViewset):
|
||||
# If called during schema generation, return an empty queryset
|
||||
if getattr(self, "swagger_fake_view", False):
|
||||
return User.objects.none()
|
||||
|
||||
queryset = (
|
||||
User.objects.filter(membership__tenant__id=self.request.tenant_id)
|
||||
if hasattr(self.request, "tenant_id")
|
||||
else User.objects.all()
|
||||
)
|
||||
|
||||
return queryset.prefetch_related("memberships", "roles")
|
||||
|
||||
def get_permissions(self):
|
||||
@@ -790,6 +802,12 @@ class UserViewSet(BaseUserViewset):
|
||||
else:
|
||||
return UserSerializer
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
if self.request.user.is_authenticated:
|
||||
context["role"] = get_role(self.request.user)
|
||||
return context
|
||||
|
||||
@action(detail=False, methods=["get"], url_name="me")
|
||||
def me(self, request):
|
||||
user = self.request.user
|
||||
@@ -803,7 +821,9 @@ class UserViewSet(BaseUserViewset):
|
||||
if kwargs["pk"] != str(self.request.user.id):
|
||||
raise ValidationError("Only the current user can be deleted.")
|
||||
|
||||
return super().destroy(request, *args, **kwargs)
|
||||
user = self.get_object()
|
||||
user.delete(using=MainRouter.admin_db)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
@@ -894,7 +914,11 @@ class UserViewSet(BaseUserViewset):
|
||||
partial_update=extend_schema(
|
||||
tags=["User"],
|
||||
summary="Partially update a user-roles relationship",
|
||||
description="Update the user-roles relationship information without affecting other fields.",
|
||||
description=(
|
||||
"Update the user-roles relationship information without affecting other fields. "
|
||||
"If the update would remove MANAGE_ACCOUNT from the last remaining user in the "
|
||||
"tenant, the API rejects the request with a 400 response."
|
||||
),
|
||||
responses={
|
||||
204: OpenApiResponse(
|
||||
response=None, description="Relationship updated successfully"
|
||||
@@ -904,7 +928,12 @@ class UserViewSet(BaseUserViewset):
|
||||
destroy=extend_schema(
|
||||
tags=["User"],
|
||||
summary="Delete a user-roles relationship",
|
||||
description="Remove the user-roles relationship from the system by their ID.",
|
||||
description=(
|
||||
"Remove the user-roles relationship from the system by their ID. If removing "
|
||||
"MANAGE_ACCOUNT would take it away from the last remaining user in the tenant, "
|
||||
"the API rejects the request with a 400 response. Users also cannot delete their "
|
||||
"own role assignments; attempting to do so returns a 400 response."
|
||||
),
|
||||
responses={
|
||||
204: OpenApiResponse(
|
||||
response=None, description="Relationship deleted successfully"
|
||||
@@ -919,11 +948,48 @@ class UserRoleRelationshipView(RelationshipView, BaseRLSViewSet):
|
||||
http_method_names = ["post", "patch", "delete"]
|
||||
schema = RelationshipViewSchema()
|
||||
# RBAC required permissions
|
||||
required_permissions = [Permissions.MANAGE_USERS]
|
||||
required_permissions = [Permissions.MANAGE_ACCOUNT]
|
||||
|
||||
def get_queryset(self):
|
||||
return User.objects.filter(membership__tenant__id=self.request.tenant_id)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
"""
|
||||
Prevent deleting role relationships if it would leave the tenant with no
|
||||
users having MANAGE_ACCOUNT. Supports deleting specific roles via JSON:API
|
||||
relationship payload or clearing all roles for the user when no payload.
|
||||
"""
|
||||
user = self.get_object()
|
||||
# Disallow deleting own roles
|
||||
if str(user.id) == str(request.user.id):
|
||||
return Response(
|
||||
data={
|
||||
"detail": "Users cannot delete the relationship with their role."
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
tenant_id = self.request.tenant_id
|
||||
payload = request.data if isinstance(request.data, dict) else None
|
||||
|
||||
# If a user has more than one role, we will delete the relationship with the roles in the payload
|
||||
data = payload.get("data") if payload else None
|
||||
if data:
|
||||
try:
|
||||
role_ids = [item["id"] for item in data]
|
||||
except KeyError:
|
||||
role_ids = []
|
||||
roles_to_remove = Role.objects.filter(id__in=role_ids, tenant_id=tenant_id)
|
||||
else:
|
||||
roles_to_remove = user.roles.filter(tenant_id=tenant_id)
|
||||
|
||||
UserRoleRelationship.objects.filter(
|
||||
user=user,
|
||||
tenant_id=tenant_id,
|
||||
role_id__in=roles_to_remove.values_list("id", flat=True),
|
||||
).delete()
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
user = self.get_object()
|
||||
|
||||
@@ -962,12 +1028,6 @@ class UserRoleRelationshipView(RelationshipView, BaseRLSViewSet):
|
||||
serializer.save()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
user = self.get_object()
|
||||
user.roles.clear()
|
||||
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
@@ -2872,13 +2932,11 @@ class InvitationAcceptViewSet(BaseRLSViewSet):
|
||||
partial_update=extend_schema(
|
||||
tags=["Role"],
|
||||
summary="Partially update a role",
|
||||
description="Update certain fields of an existing role's information without affecting other fields.",
|
||||
responses={200: RoleSerializer},
|
||||
),
|
||||
destroy=extend_schema(
|
||||
tags=["Role"],
|
||||
summary="Delete a role",
|
||||
description="Remove a role from the system by their ID.",
|
||||
),
|
||||
)
|
||||
class RoleViewSet(BaseRLSViewSet):
|
||||
@@ -2900,6 +2958,14 @@ class RoleViewSet(BaseRLSViewSet):
|
||||
return RoleUpdateSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
@extend_schema(
|
||||
description=(
|
||||
"Update selected fields on an existing role. When changing the `users` "
|
||||
"relationship of a role that grants MANAGE_ACCOUNT, the API blocks attempts "
|
||||
"that would leave the tenant without any MANAGE_ACCOUNT assignees and prevents "
|
||||
"callers from removing their own assignment to that role."
|
||||
)
|
||||
)
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
user_role = get_role(request.user)
|
||||
# If the user is the owner of the role, the manage_account field is not editable
|
||||
@@ -2907,6 +2973,12 @@ class RoleViewSet(BaseRLSViewSet):
|
||||
request.data["manage_account"] = str(user_role.manage_account).lower()
|
||||
return super().partial_update(request, *args, **kwargs)
|
||||
|
||||
@extend_schema(
|
||||
description=(
|
||||
"Delete the specified role. The API rejects deletion of the last role "
|
||||
"in the tenant that grants MANAGE_ACCOUNT."
|
||||
)
|
||||
)
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
if (
|
||||
@@ -2914,6 +2986,21 @@ class RoleViewSet(BaseRLSViewSet):
|
||||
): # TODO: Move to a constant/enum (in case other roles are created by default)
|
||||
raise ValidationError(detail="The admin role cannot be deleted.")
|
||||
|
||||
# Prevent deleting the last MANAGE_ACCOUNT role in the tenant
|
||||
if instance.manage_account:
|
||||
has_other_ma = (
|
||||
Role.objects.filter(tenant_id=instance.tenant_id, manage_account=True)
|
||||
.exclude(id=instance.id)
|
||||
.exists()
|
||||
)
|
||||
if not has_other_ma:
|
||||
return Response(
|
||||
data={
|
||||
"detail": "Cannot delete the only role with MANAGE_ACCOUNT in the tenant."
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
return super().destroy(request, *args, **kwargs)
|
||||
|
||||
|
||||
@@ -3470,6 +3557,7 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
|
||||
),
|
||||
"name": requirement.get("name", ""),
|
||||
"framework": compliance_framework.get("framework", ""),
|
||||
"compliance_name": compliance_framework.get("name", ""),
|
||||
"version": compliance_framework.get("version", ""),
|
||||
"description": requirement.get("description", ""),
|
||||
"attributes": base_attributes,
|
||||
@@ -4112,3 +4200,84 @@ class ProcessorViewSet(BaseRLSViewSet):
|
||||
elif self.action == "partial_update":
|
||||
return ProcessorUpdateSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="List API keys",
|
||||
description="Retrieve a list of API keys for the tenant, with filtering support.",
|
||||
),
|
||||
retrieve=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Retrieve API key details",
|
||||
description="Fetch detailed information about a specific API key by its ID.",
|
||||
),
|
||||
create=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Create a new API key",
|
||||
description="Create a new API key for the tenant.",
|
||||
),
|
||||
partial_update=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Partially update an API key",
|
||||
description="Modify certain fields of an existing API key without affecting other settings.",
|
||||
),
|
||||
revoke=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Revoke an API key",
|
||||
description="Revoke an API key by its ID. This action is irreversible and will prevent the key from being "
|
||||
"used.",
|
||||
request=None,
|
||||
responses={
|
||||
200: OpenApiResponse(
|
||||
response=TenantApiKeySerializer,
|
||||
description="API key was successfully revoked",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
class TenantApiKeyViewSet(BaseRLSViewSet):
|
||||
queryset = TenantAPIKey.objects.all()
|
||||
serializer_class = TenantApiKeySerializer
|
||||
filterset_class = TenantApiKeyFilter
|
||||
http_method_names = ["get", "post", "patch", "delete"]
|
||||
ordering = ["revoked", "-created"]
|
||||
ordering_fields = ["name", "prefix", "revoked", "inserted_at", "expires_at"]
|
||||
# RBAC required permissions
|
||||
required_permissions = [Permissions.MANAGE_ACCOUNT]
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = TenantAPIKey.objects.filter(
|
||||
tenant_id=self.request.tenant_id
|
||||
).annotate(inserted_at=F("created"), expires_at=F("expiry_date"))
|
||||
return queryset
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.action == "create":
|
||||
return TenantApiKeyCreateSerializer
|
||||
elif self.action == "partial_update":
|
||||
return TenantApiKeyUpdateSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
@extend_schema(exclude=True)
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
raise MethodNotAllowed(method="DESTROY")
|
||||
|
||||
@action(detail=True, methods=["delete"])
|
||||
def revoke(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
|
||||
# Check if already revoked
|
||||
if instance.revoked:
|
||||
raise ValidationError(
|
||||
{
|
||||
"detail": "API key is already revoked",
|
||||
}
|
||||
)
|
||||
|
||||
TenantAPIKey.objects.revoke_api_key(instance.pk)
|
||||
instance.refresh_from_db()
|
||||
|
||||
serializer = self.get_serializer(instance)
|
||||
return Response(data=serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -48,6 +48,10 @@ class NDJSONFormatter(logging.Formatter):
|
||||
log_record["user_id"] = record.user_id
|
||||
if hasattr(record, "tenant_id"):
|
||||
log_record["tenant_id"] = record.tenant_id
|
||||
if hasattr(record, "api_key_prefix"):
|
||||
log_record["api_key_prefix"] = (
|
||||
record.api_key_prefix if record.api_key_prefix != "N/A" else None
|
||||
)
|
||||
if hasattr(record, "method"):
|
||||
log_record["method"] = record.method
|
||||
if hasattr(record, "path"):
|
||||
@@ -90,6 +94,9 @@ class HumanReadableFormatter(logging.Formatter):
|
||||
# Add REST API extra fields
|
||||
if hasattr(record, "user_id"):
|
||||
log_components.append(f"({record.user_id})")
|
||||
if hasattr(record, "api_key_prefix"):
|
||||
if record.api_key_prefix != "N/A":
|
||||
log_components.append(f"(API-Key {record.api_key_prefix})")
|
||||
if hasattr(record, "tenant_id"):
|
||||
log_components.append(f"[{record.tenant_id}]")
|
||||
if hasattr(record, "method"):
|
||||
|
||||
@@ -43,6 +43,7 @@ INSTALLED_APPS = [
|
||||
"allauth.socialaccount.providers.saml",
|
||||
"dj_rest_auth.registration",
|
||||
"rest_framework.authtoken",
|
||||
"drf_simple_apikey",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
@@ -84,7 +85,7 @@ TEMPLATES = [
|
||||
REST_FRAMEWORK = {
|
||||
"DEFAULT_SCHEMA_CLASS": "drf_spectacular_jsonapi.schemas.openapi.JsonApiAutoSchema",
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": (
|
||||
"rest_framework_simplejwt.authentication.JWTAuthentication",
|
||||
"api.authentication.CombinedJWTOrAPIKeyAuthentication",
|
||||
),
|
||||
"PAGE_SIZE": 10,
|
||||
"EXCEPTION_HANDLER": "api.exceptions.custom_exception_handler",
|
||||
@@ -220,7 +221,8 @@ SIMPLE_JWT = {
|
||||
"JTI_CLAIM": "jti",
|
||||
"USER_ID_FIELD": "id",
|
||||
"USER_ID_CLAIM": "sub",
|
||||
# Issuer and Audience claims, for the moment we will keep these values as default values, they may change in the future.
|
||||
# Issuer and Audience claims, for the moment we will keep these values as default values, they may change in the
|
||||
# future.
|
||||
"AUDIENCE": env.str("DJANGO_JWT_AUDIENCE", "https://api.prowler.com"),
|
||||
"ISSUER": env.str("DJANGO_JWT_ISSUER", "https://api.prowler.com"),
|
||||
# Additional security settings
|
||||
@@ -229,6 +231,13 @@ SIMPLE_JWT = {
|
||||
|
||||
SECRETS_ENCRYPTION_KEY = env.str("DJANGO_SECRETS_ENCRYPTION_KEY", "")
|
||||
|
||||
# DRF Simple API Key settings
|
||||
DRF_API_KEY = {
|
||||
"FERNET_SECRET": SECRETS_ENCRYPTION_KEY,
|
||||
"API_KEY_LIFETIME": 365,
|
||||
"AUTHENTICATION_KEYWORD_HEADER": "Api-Key",
|
||||
}
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/5.0/topics/i18n/
|
||||
|
||||
|
||||
@@ -20,6 +20,13 @@ DATABASE_ROUTERS = []
|
||||
TESTING = True
|
||||
SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0="
|
||||
|
||||
# DRF Simple API Key settings
|
||||
DRF_API_KEY = {
|
||||
"FERNET_SECRET": SECRETS_ENCRYPTION_KEY,
|
||||
"API_KEY_LIFETIME": 365,
|
||||
"AUTHENTICATION_KEYWORD_HEADER": "Api-Key",
|
||||
}
|
||||
|
||||
# JWT
|
||||
|
||||
SIMPLE_JWT["ALGORITHM"] = "HS256" # noqa: F405
|
||||
|
||||
@@ -38,6 +38,7 @@ from api.models import (
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
@@ -191,6 +192,108 @@ def create_test_user_rbac_limited(django_db_setup, django_db_blocker):
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def create_test_user_rbac_manage_account(django_db_setup, django_db_blocker):
|
||||
"""User with only manage_account permission (no manage_users)."""
|
||||
with django_db_blocker.unblock():
|
||||
user = User.objects.create_user(
|
||||
name="testing_manage_account",
|
||||
email="rbac_manage_account@rbac.com",
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
tenant = Tenant.objects.create(
|
||||
name="Tenant Test Manage Account",
|
||||
)
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
role = Role.objects.create(
|
||||
name="manage_account",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=False,
|
||||
manage_account=True,
|
||||
manage_billing=False,
|
||||
manage_providers=False,
|
||||
manage_integrations=False,
|
||||
manage_scans=False,
|
||||
unlimited_visibility=False,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client_rbac_manage_account(
|
||||
create_test_user_rbac_manage_account, tenants_fixture, client
|
||||
):
|
||||
client.user = create_test_user_rbac_manage_account
|
||||
serializer = TokenSerializer(
|
||||
data={
|
||||
"type": "tokens",
|
||||
"email": "rbac_manage_account@rbac.com",
|
||||
"password": TEST_PASSWORD,
|
||||
}
|
||||
)
|
||||
serializer.is_valid()
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def create_test_user_rbac_manage_users_only(django_db_setup, django_db_blocker):
|
||||
"""User with only manage_users permission (no manage_account)."""
|
||||
with django_db_blocker.unblock():
|
||||
user = User.objects.create_user(
|
||||
name="testing_manage_users_only",
|
||||
email="rbac_manage_users_only@rbac.com",
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
tenant = Tenant.objects.create(name="Tenant Test Manage Users Only")
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
role = Role.objects.create(
|
||||
name="manage_users_only",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=True,
|
||||
manage_account=False,
|
||||
manage_billing=False,
|
||||
manage_providers=False,
|
||||
manage_integrations=False,
|
||||
manage_scans=False,
|
||||
unlimited_visibility=False,
|
||||
)
|
||||
UserRoleRelationship.objects.create(user=user, role=role, tenant_id=tenant.id)
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client_rbac_manage_users_only(
|
||||
create_test_user_rbac_manage_users_only, client
|
||||
):
|
||||
client.user = create_test_user_rbac_manage_users_only
|
||||
serializer = TokenSerializer(
|
||||
data={
|
||||
"type": "tokens",
|
||||
"email": "rbac_manage_users_only@rbac.com",
|
||||
"password": TEST_PASSWORD,
|
||||
}
|
||||
)
|
||||
serializer.is_valid()
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client_rbac(create_test_user_rbac, tenants_fixture, client):
|
||||
client.user = create_test_user_rbac
|
||||
@@ -1266,6 +1369,56 @@ def saml_sociallogin(users_fixture):
|
||||
return sociallogin
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_keys_fixture(tenants_fixture, create_test_user):
|
||||
"""Create test API keys for testing."""
|
||||
tenant = tenants_fixture[0]
|
||||
user = create_test_user
|
||||
|
||||
# Create and assign role to user for API key authentication
|
||||
role = Role.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
name="Test API Key Role",
|
||||
unlimited_visibility=True,
|
||||
manage_account=True,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
# Create API keys with different states
|
||||
api_key1, raw_key1 = TenantAPIKey.objects.create_api_key(
|
||||
name="Test API Key 1",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
)
|
||||
|
||||
api_key2, raw_key2 = TenantAPIKey.objects.create_api_key(
|
||||
name="Test API Key 2",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
expiry_date=datetime.now(timezone.utc) + timedelta(days=60),
|
||||
)
|
||||
|
||||
# Revoked API key
|
||||
api_key3, raw_key3 = TenantAPIKey.objects.create_api_key(
|
||||
name="Revoked API Key",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
)
|
||||
api_key3.revoked = True
|
||||
api_key3.save()
|
||||
|
||||
# Store raw keys on instances for testing
|
||||
api_key1._raw_key = raw_key1
|
||||
api_key2._raw_key = raw_key2
|
||||
api_key3._raw_key = raw_key3
|
||||
|
||||
return [api_key1, api_key2, api_key3]
|
||||
|
||||
|
||||
def get_authorization_header(access_token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
|
||||
@@ -461,7 +461,7 @@ def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
|
||||
return backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
|
||||
@shared_task(base=RLSTask, name="scan-compliance-overviews", queue="overview")
|
||||
@shared_task(base=RLSTask, name="scan-compliance-overviews", queue="compliance")
|
||||
def create_compliance_requirements_task(tenant_id: str, scan_id: str):
|
||||
"""
|
||||
Creates detailed compliance requirement records for a scan.
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
/* Override Tailwind CSS reset for markdown content */
|
||||
.markdown-content ul {
|
||||
list-style: disc !important;
|
||||
margin-left: 20px !important;
|
||||
padding-left: 10px !important;
|
||||
margin-bottom: 8px !important;
|
||||
}
|
||||
|
||||
.markdown-content ol {
|
||||
list-style: decimal !important;
|
||||
margin-left: 20px !important;
|
||||
padding-left: 10px !important;
|
||||
margin-bottom: 8px !important;
|
||||
}
|
||||
|
||||
.markdown-content li {
|
||||
margin-bottom: 4px !important;
|
||||
display: list-item !important;
|
||||
}
|
||||
|
||||
.markdown-content p {
|
||||
margin-bottom: 8px !important;
|
||||
}
|
||||
|
||||
/* Ensure nested lists work properly */
|
||||
.markdown-content ul ul {
|
||||
margin-top: 4px !important;
|
||||
margin-bottom: 4px !important;
|
||||
}
|
||||
|
||||
.markdown-content ol ol {
|
||||
margin-top: 4px !important;
|
||||
margin-bottom: 4px !important;
|
||||
}
|
||||
@@ -1654,6 +1654,39 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
# Description as first details item
|
||||
html.Div(
|
||||
[
|
||||
html.P(
|
||||
html.Strong(
|
||||
"Description: ",
|
||||
style={
|
||||
"margin-bottom": "8px"
|
||||
},
|
||||
)
|
||||
),
|
||||
html.Div(
|
||||
dcc.Markdown(
|
||||
str(
|
||||
data.get(
|
||||
"DESCRIPTION",
|
||||
"",
|
||||
)
|
||||
),
|
||||
dangerously_allow_html=True,
|
||||
style={
|
||||
"margin-left": "0px",
|
||||
"padding-left": "10px",
|
||||
},
|
||||
),
|
||||
className="markdown-content",
|
||||
style={
|
||||
"margin-left": "0px",
|
||||
"padding-left": "10px",
|
||||
},
|
||||
),
|
||||
],
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.P(
|
||||
@@ -1793,19 +1826,27 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
|
||||
html.P(
|
||||
html.Strong(
|
||||
"Risk: ",
|
||||
style={
|
||||
"margin-right": "5px"
|
||||
},
|
||||
style={},
|
||||
)
|
||||
),
|
||||
html.P(
|
||||
str(data.get("RISK", "")),
|
||||
html.Div(
|
||||
dcc.Markdown(
|
||||
str(
|
||||
data.get("RISK", "")
|
||||
),
|
||||
dangerously_allow_html=True,
|
||||
style={
|
||||
"margin-left": "0px",
|
||||
"padding-left": "10px",
|
||||
},
|
||||
),
|
||||
className="markdown-content",
|
||||
style={
|
||||
"margin-left": "5px"
|
||||
"margin-left": "0px",
|
||||
"padding-left": "10px",
|
||||
},
|
||||
),
|
||||
],
|
||||
style={"display": "flex"},
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
@@ -1847,23 +1888,32 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
|
||||
html.Strong(
|
||||
"Recommendation: ",
|
||||
style={
|
||||
"margin-right": "5px"
|
||||
"margin-bottom": "8px"
|
||||
},
|
||||
)
|
||||
),
|
||||
html.P(
|
||||
str(
|
||||
data.get(
|
||||
"REMEDIATION_RECOMMENDATION_TEXT",
|
||||
"",
|
||||
)
|
||||
html.Div(
|
||||
dcc.Markdown(
|
||||
str(
|
||||
data.get(
|
||||
"REMEDIATION_RECOMMENDATION_TEXT",
|
||||
"",
|
||||
)
|
||||
),
|
||||
dangerously_allow_html=True,
|
||||
style={
|
||||
"margin-left": "0px",
|
||||
"padding-left": "10px",
|
||||
},
|
||||
),
|
||||
className="markdown-content",
|
||||
style={
|
||||
"margin-left": "5px"
|
||||
"margin-left": "0px",
|
||||
"padding-left": "10px",
|
||||
},
|
||||
),
|
||||
],
|
||||
style={"display": "flex"},
|
||||
style={"margin-bottom": "15px"},
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
|
||||
@@ -14,9 +14,11 @@ services:
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
|
||||
volumes:
|
||||
- "./api/src/backend:/home/prowler/backend"
|
||||
- "./api/pyproject.toml:/home/prowler/pyproject.toml"
|
||||
- "outputs:/tmp/prowler_api_output"
|
||||
- ./api/src/backend:/home/prowler/backend
|
||||
- ./api/pyproject.toml:/home/prowler/pyproject.toml
|
||||
- ./api/docker-entrypoint.sh:/home/prowler/docker-entrypoint.sh
|
||||
- ./_data/api:/home/prowler/.config/prowler-api
|
||||
- outputs:/tmp/prowler_api_output
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
@@ -64,7 +66,7 @@ services:
|
||||
image: valkey/valkey:7-alpine3.19
|
||||
hostname: "valkey"
|
||||
volumes:
|
||||
- ./api/_data/valkey:/data
|
||||
- ./_data/valkey:/data
|
||||
env_file:
|
||||
- path: .env
|
||||
required: false
|
||||
|
||||
@@ -8,7 +8,8 @@ services:
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
|
||||
volumes:
|
||||
- "output:/tmp/prowler_api_output"
|
||||
- ./_data/api:/home/prowler/.config/prowler-api
|
||||
- output:/tmp/prowler_api_output
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
|
||||
@@ -0,0 +1,533 @@
|
||||
Whenever you are writing documentation, try to follow this text/communication style guide:
|
||||
|
||||
# Prowler's Brand Voice
|
||||
|
||||
Prowler is the open cloud security platform trusted by thousands of organizations automating security monitoring and compliance with hundreds of built-in security checks, remediation solutions, and compliance frameworks. With over 10 million downloads, thousands of contributors, and a vibrant global community, Prowler is driving the open-source cloud security movement by providing transparent, customizable, and user-friendly solutions that help teams secure AWS, Azure, GCP, Kubernetes, and Microsoft 365 environments. Leveraging open-source innovation and cost savings, the Prowler platform makes cloud security 10 times more cost-effective and accessible than alternatives.
|
||||
|
||||
These values must be demonstrated in all our conversations and communications.
|
||||
|
||||
---
|
||||
|
||||
## Unbiased Communication
|
||||
|
||||
Prowler aims to reach every person around the globe. Our communications must be as inclusive and diverse as possible at all times. We are guided by the following principles:
|
||||
|
||||
### Avoid Gendered Pronouns
|
||||
|
||||
References to gendered pronouns (she/her/hers, he/him/his, they/them/theirs) must be avoided whenever possible.
|
||||
|
||||
* Use second person for communications (you/your/yours).
|
||||
* Use a third-person reference instead of a gendered pronoun (the customer, the user).
|
||||
* In case a gendered pronoun must be forcibly used, use they/them/theirs.
|
||||
* Avoid double references like she/he, s/he, etc.
|
||||
|
||||
### Use Alternatives for Gendered Nouns
|
||||
|
||||
Avoid nouns that include gendered components. Examples:
|
||||
|
||||
* Businessman 🡪 Entrepreneur, businessperson, executive
|
||||
* Salesman 🡪 Sales executive, sales representative
|
||||
* Mankind 🡪 Humanity, people
|
||||
* Penmanship 🡪 Calligraphy, handwriting
|
||||
* Middleman 🡪 Intermediary, negotiator
|
||||
|
||||
### Diversity, Equity, and Inclusion
|
||||
|
||||
All communications must prioritize diversity and inclusivity. When incorporating examples, ensure representation across sex, gender, age, identity, race, culture, background, ability, and socioeconomic status. Strive for balanced and respectful depictions.
|
||||
|
||||
### Cultural and Geographical Awareness
|
||||
|
||||
Before referencing a region, country, culture, national status, political status, or socioeconomic realities, conduct thorough research. Maintain a respectful, informed approach and avoid unnecessary conflicts.
|
||||
|
||||
### Avoiding Generalizations
|
||||
|
||||
Avoid broad assumptions about gender, sex, race, sexual orientation, nationality, or culture. Generalizations can introduce bias and misrepresentation. Example to avoid: "Cybersecurity is of the utmost importance in the country, where corruption runs amok."
|
||||
|
||||
### Respectful Language
|
||||
|
||||
Derogatory terms must not be used. If uncertain about terminology, consult individuals from the relevant region or community to ensure accuracy and appropriateness.
|
||||
|
||||
### Clear and Accessible Language
|
||||
|
||||
* **Jargon:** Use technical terminology only when the audience is expected to understand it. If uncertain, opt for clear and universally accessible language.
|
||||
* **Slang:** Minimize slang usage. Even when confident about the audience, prefer formal and neutral language to enhance clarity.
|
||||
|
||||
### Militaristic Language
|
||||
|
||||
Current trends in a field as sensitive as cybersecurity avoid violent and militaristic references, except when explicitly referring to combat. These are some alternatives:
|
||||
|
||||
* Combat, fight, eliminate 🡪 Address, protect, safeguard, ward
|
||||
* Kill chain 🡪 Cyberattack chain
|
||||
* Attacker 🡪 Cyberattacker, bad actor, threat actor
|
||||
* Defense-in-depth approach 🡪 Multilayered approach
|
||||
* First line of defense, frontline 🡪 Security, protection, defense
|
||||
* External attack surface 🡪 Vulnerabilities, point of access, external exposure
|
||||
|
||||
### Note on Safety and Security
|
||||
|
||||
“Safety” and “security” are terms often misunderstood. “Safety” is the microscopic, personal and individual term, while “security” is the macroscopic, broader, national term. Examples:
|
||||
|
||||
a. Seat belts are great for personal safety.
|
||||
b. National security is of the utmost concern nowadays.
|
||||
|
||||
---
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
### Prowler Features
|
||||
|
||||
Prowler Features are considered proper nouns. They are to be referenced without articles in all pieces of writing.
|
||||
|
||||
This is a list of Prowler Features:
|
||||
|
||||
* **Prowler App**
|
||||
* **Prowler CLI**
|
||||
* **Prowler SDK**
|
||||
* **Built-in Compliance Checks**
|
||||
* **Multi-cloud Security Scanning**
|
||||
* **Autonomous Cloud Security Analyst (AI)**
|
||||
* **Threat & Misconfiguration Detection**
|
||||
* **Role-Based Access Control (RBAC)**
|
||||
* **Identity & Access Risk Detection**
|
||||
* **Tag-Based Scanning & Filtering**
|
||||
* **Audit Logs & Security Reports**
|
||||
* **Agentless & Works Anywhere**
|
||||
* **Automated Scans & Continuous Monitoring**
|
||||
* **Chat-based Security Querying (AI)**
|
||||
* **AI-Generated Detections & Remediations**
|
||||
* **Prowler Studio**
|
||||
* **Custom Security Policies**
|
||||
* **Prowler Cloud**
|
||||
* **Prowler Registry**
|
||||
* **Open Source & Full APIs**
|
||||
|
||||
---
|
||||
|
||||
## Verbal Constructions in Technical Writing
|
||||
|
||||
Choosing verbal constructions (using verbs) over nominal (using nouns) constructions can significantly impact the clarity, conciseness and especially readability of the content.
|
||||
|
||||
Nominal constructions often introduce unnecessary complexity or vagueness. For example:
|
||||
|
||||
* Nominal: "The creation of the report was successful."
|
||||
* Verbal: "The report was successfully created."
|
||||
|
||||
Verbal constructions also tend to use fewer words, resulting in a more polished and concise style:
|
||||
|
||||
* Nominal: "The implementation of the solution reduced system downtime."
|
||||
* Verbal: "The solution reduced system downtime."
|
||||
|
||||
Verbal constructions are to be chosen over nominal constructions whenever possible.
|
||||
|
||||
---
|
||||
|
||||
### Addendum: Verbal Structures Actually State your Purpose
|
||||
|
||||
* **Example 1:** Recommendation for multiple subscriptions
|
||||
* **Example 2:** Recommendation for Managing Multiple Subscriptions
|
||||
|
||||
Example 1 is vague and even potentially ambiguous. Verbs state your purpose and they must be used whenever possible.
|
||||
|
||||
---
|
||||
|
||||
## Avoiding The Second Person Except for Imperative Instructions
|
||||
|
||||
Explicit use of second-person pronouns (you) and possessives (your) should be minimized whenever possible. Those constructions are best reserved for cases when instructions are directly given in an imperative form:
|
||||
|
||||
**Example of Improvement Through Avoiding Second Person Pronouns**
|
||||
|
||||
**Original:**
|
||||
Prowler App can be installed in different ways, depending on your environment:
|
||||
|
||||
**Improved Version:**
|
||||
Prowler App offers flexible installation methods tailored to various environments:
|
||||
|
||||
---
|
||||
|
||||
## Title-Case Capitalization
|
||||
|
||||
We use title case.
|
||||
|
||||
**Example:** This Is an Example on Title Case
|
||||
|
||||
Title case tends to be better for SEO because it improves readability and makes headlines more visually distinct, which can lead to higher click-through rates (CTR).
|
||||
|
||||
---
|
||||
|
||||
### Other Considerations on Capitalization
|
||||
|
||||
Follow these additional guidelines for capitalization:
|
||||
|
||||
### Inner Capitalization
|
||||
|
||||
Avoid internal capitalization of words in body text unless it is part of a proper name or brand denomination. Example: instead of E-mail and e-Book use email/e-mail and e-book.
|
||||
|
||||
### Acronym Capitalization
|
||||
|
||||
Do not capitalize the individual words of the spelled-out form of acronyms. Example: instead of CTI (Cyber Threat Intelligence) use CTI (cyber threat intelligence), but AWS (Amazon Web Services) is to be kept as is.
|
||||
|
||||
### Avoid Capitalization for Emphasis
|
||||
|
||||
Do not capitalize words in order to emphasize them.
|
||||
|
||||
### Capitalization of Languages and Standards
|
||||
|
||||
Check for the proper capitalization of language and standard names.
|
||||
|
||||
Language examples: HTML, JSON, YAML, XML, etc., must be capitalized.
|
||||
|
||||
Standard examples: standards follow title-case capitalization: Industrial Automation and Control Systems (IACS).
|
||||
|
||||
### Capitalization of Laws and Regulations
|
||||
|
||||
Laws and Regulations follow title-case capitalization. If referring to a non-domestic law or regulation, add the nationality and the original name.
|
||||
|
||||
**Example:** Code for the Cybersecurity Law published in the Spanish Official State Bulletin (BOE, Boletín Oficial del Estado).
|
||||
|
||||
Most EU regulations have an official translation for all EU languages; please check it on the EUR-Lex portal and choose the proper language: https://eur-lex.europa.eu/.
|
||||
|
||||
The different languages can be chosen on the portal under Languages, formats and link to OJ.
|
||||
|
||||
---
|
||||
|
||||
## Hyphenation
|
||||
|
||||
Hyphenation is to be used for noun modifiers in prenominal position, i.e., placed before nouns.
|
||||
|
||||
**Example:** Prowler is a world-leading company in open-source software.
|
||||
|
||||
It is not to be used for predicate adjectives in postnominal position.
|
||||
|
||||
**Example:** Prowler has many features built in.
|
||||
|
||||
### Note on Hyphenation and SEO
|
||||
|
||||
Google treats hyphens as word separators, as if they were blank spaces, i.e., the term `high quality checks` is treated as if it was the same term as `high-quality checks`. Hyphenation does not affect SEO on body text, thus the grammatically correct approach is recommended as sign of good writing. However, underscores (`_`) are treated as different words. This has implications particularly for URLs.
|
||||
|
||||
Hyphens are preferred for URLs as they improve readability and indexing.
|
||||
|
||||
**Example:**
|
||||
* Better approach: `example.com/this-is-an-URL`
|
||||
* Less ideal approach: `example.com/this_is_an_URL`
|
||||
|
||||
---
|
||||
|
||||
## Bullet Points
|
||||
|
||||
Bullet points offer several advantages:
|
||||
|
||||
* **Improved readability:** Bullet points make information scannable and enable vertical reading, allowing users to easily spot relevant details and breaking content into more digestible pieces.
|
||||
* **Highlighting of relevant information:** They emphasize the most salient points, improving focus and enabling quick summarization at a glance.
|
||||
* **Improved retention:** Bullet points enhance memory retention and contribute to a clearer, more polished final product.
|
||||
* **Structured presentation:** They improve user experience through the logical organization of content.
|
||||
* **SEO relevance (Search Engine Optimization):** Bullet points make content easier to consume and offer the following SEO benefits:
|
||||
* Reduced bounce rates
|
||||
* Increased time spent on page
|
||||
* Strategic keyword optimization
|
||||
* Improved chances of being featured in search engine snippets
|
||||
* Enhanced crawlability for search engines.
|
||||
|
||||
---
|
||||
|
||||
### When to Use Bullet Points
|
||||
|
||||
The use of bullet points is highly recommended when:
|
||||
|
||||
* Information can be logically divided into multiple categories, each sharing characteristics, features, or other relevant classifications.
|
||||
* Items are significant enough as standalone concepts to deserve their own bullet point.
|
||||
|
||||
**Example of Improvement Through Bullet Points**
|
||||
|
||||
**Original:**
|
||||
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMS, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme), and your custom security frameworks.
|
||||
|
||||
**Improved with Bullet Points:**
|
||||
|
||||
**Prowler CLI Features:**
|
||||
Prowler CLI includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:
|
||||
|
||||
* **Industry standards:** CIS, NIST 800, NIST CSF, and CISA
|
||||
* **Regulatory compliance and governance:** RBI, FedRAMP, and PCI-DSS
|
||||
* **Frameworks for sensitive data and privacy:** GDPR, HIPAA, and FFIEC
|
||||
* **Frameworks for organizational governance and quality control:** SOC2 and GXP
|
||||
* **AWS-specific guidance:** AWS Foundational Technical Review (FTR) and AWS Well-Architected Framework (Security Pillar)
|
||||
* **Regional compliance:** ENS (Spanish National Security Scheme)
|
||||
* **Custom security frameworks:** Tailored to meet your organization’s specific needs
|
||||
|
||||
---
|
||||
|
||||
### Punctuation of Bullet Points
|
||||
|
||||
There are several options for punctuating bullet points. Regardless of the style chosen, it is imperative to maintain consistency throughout the text.
|
||||
|
||||
* **No punctuation (minimalistic):** This strategy is suitable when no verbs are involved and is best used to highlight products or features in isolation. For example:
|
||||
|
||||
Prowler App is composed of three key components:
|
||||
* Prowler UI
|
||||
* Prowler API
|
||||
* Prowler SDK
|
||||
|
||||
This example highlights each element individually and fosters retention with a noise-free approach.
|
||||
|
||||
* **Periods for full sentences:** This approach works best when each bullet point forms a full sentence or includes verbs. For example:
|
||||
|
||||
Prowler App is composed of three key components:
|
||||
* Prowler UI, a web-based interface, built with Next.js, providing a user-friendly experience for executing Prowler scans and visualizing results.
|
||||
* Prowler API, a backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
|
||||
* Prowler SDK, a Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
|
||||
|
||||
This example demonstrates a polished list using proper punctuation.
|
||||
|
||||
* **Semi-colons with final period:** This approach was traditionally used for those cases when bullet points were part of a continuous sentence or logical succession. However, it is being deprecated, consistent with the declining use of semi-colons in modern writing. It is to be avoided whenever possible. For example:
|
||||
|
||||
Prowler UI:
|
||||
* is a web-based interface;
|
||||
* is built with Next.js;
|
||||
* provides a user-friendly experience for executing Prowler scans and visualizing results.
|
||||
|
||||
---
|
||||
|
||||
### Advantages of Adding Headers to Bullet Points
|
||||
|
||||
Adding headers to bullet points in technical writing is a powerful technique that enhances both the clarity and usability of the content. It also has advantages for SEO:
|
||||
|
||||
* Increased crawlability of search engines
|
||||
* Enhanced keyword integration
|
||||
* Improved user engagement
|
||||
* Enhanced snippetting by search engines
|
||||
* Reduced bounce rates
|
||||
|
||||
It is recommended to add headers to bullet points whenever possible.
|
||||
|
||||
---
|
||||
|
||||
## Quotation Marks
|
||||
|
||||
### Quotation Marks Usage in Technical Documentation
|
||||
|
||||
Proper use of quotation marks enhances clarity and consistency in technical writing. Below are key guidelines for using double and single quotation marks, following American English conventions.
|
||||
|
||||
### Double Quotation Marks
|
||||
|
||||
* Use for titles of books, movies, songs, and articles.
|
||||
* Enclose direct quotes:
|
||||
* **Example:** The developer said, “We will try to fix this issue.”
|
||||
* Capitalize the first word if quoting a full sentence.
|
||||
* When quoting a phrase within a sentence, do not capitalize:
|
||||
* **Example:** The news portal called our product “one of the most efficient online help authoring tools.”
|
||||
* Use scare quotes when words acquire a different or ironic meaning:
|
||||
* **Example:** The update is “scheduled” to release next week.
|
||||
* To refer to a term without applying its meaning, use double quotes (or italics):
|
||||
* **Example:** Avoid terms like “don’t worry” in pop-ups to prevent user anxiety.
|
||||
|
||||
### Single Quotation Marks
|
||||
|
||||
* Used inside double quotes:
|
||||
* **Example:** He said, “I am not sure what ‘single sourcing’ means.”
|
||||
* In British English, the order is often reversed (single quotation marks on the outside).
|
||||
|
||||
---
|
||||
|
||||
### Double Quoting in Software Documentation
|
||||
|
||||
Double quoting is to be used throughout software documentation where its use does not interfere with formatting restrictions.
|
||||
|
||||
1. **Menu Items & UI Options**
|
||||
* Use double quotation marks when referring to selectable items in software interfaces.
|
||||
* **Example:** Click “File” and select “Save As” to export your document.
|
||||
2. **Buttons & Commands**
|
||||
* Use double quotes for labeled interface elements that users interact with.
|
||||
* **Example:** Select “Submit” to finalize the form.
|
||||
3. **Exact Input & User Actions**
|
||||
* If users need to enter exact text, enclose it in double quotes.
|
||||
* **Example:** Type “admin” in the username field.
|
||||
4. **Avoid Quoting Software Names**
|
||||
* Do not use quotation marks for software product names unless required for clarity.
|
||||
* **Correct:** Open Microsoft Excel.
|
||||
* **Incorrect:** Open “Microsoft Excel.”
|
||||
|
||||
---
|
||||
|
||||
## Interaction Verbs
|
||||
|
||||
The following are the correct verbs that must be used when referring to user interactions with the software.
|
||||
|
||||
### Mouse & Trackpad Actions (Desktop/Laptop)
|
||||
|
||||
* **Click:** Press and release the left mouse button or trackpad without moving the pointer.
|
||||
* **Example:** Click the “OK” button to confirm. 🡪 Transitive
|
||||
* **Click on:** Often interchangeable with "Click," but less commonly used in technical writing for UI interactions.
|
||||
* **Example:** Click on the "Settings" icon to open preferences. (Less recommended—“Click” is preferred.)
|
||||
* **Double-click:** Press and release twice in quick succession, usually to open files or applications.
|
||||
* **Example:** Double-click the document to open it. 🡪 Transitive
|
||||
* **Right-click:** Press and release the right mouse button to open a context menu.
|
||||
* **Example:** Right-click the folder and select “Properties.” 🡪 Transitive
|
||||
|
||||
### Touchscreen Actions (Mobile & Touch)
|
||||
|
||||
* **Tap:** Touch the screen lightly with a finger or stylus, equivalent to "Click" on a mouse.
|
||||
* **Example:** Tap the “Sign in” button.
|
||||
* **Double-tap:** Quickly touch the screen twice, often used for zooming or selecting text.
|
||||
* **Example:** Double-tap an image to zoom in.
|
||||
* **Press and hold:** Touch and hold the screen for a moment to access additional options.
|
||||
* **Example:** Press and hold an app icon to see more actions. (Similar to “Right-click” in desktop environments.)
|
||||
|
||||
### Additional Actions
|
||||
|
||||
* **Drag:** Click or tap an item and move it while holding down the button or finger.
|
||||
* **Example:** Drag the file into the folder.
|
||||
* **Swipe:** Move a finger across the touchscreen horizontally or vertically.
|
||||
* **Example:** Swipe left to dismiss the notification.
|
||||
* **Pinch to zoom:** Use two fingers to zoom in or out.
|
||||
* **Example:** Pinch the screen to zoom in on the image.
|
||||
* **Scroll:** Move the mouse wheel, swipe, or use the arrow keys to navigate up/down.
|
||||
* **Example:** Scroll down to see more results.
|
||||
|
||||
The widely-accepted terminology for gestures is Windows’: https://support.microsoft.com/en-us/windows/touch-gestures-for-windows-a9d28305-4818-a5df-4e2b-e5590f850741
|
||||
|
||||
---
|
||||
|
||||
## Sentence Structure for Technical Writing and SEO
|
||||
|
||||
When writing technical documentation, clarity, conciseness, and searchability (SEO) are key factors. Let’s compare the following two sentence structures, extracted from Prowler’s documentation:
|
||||
|
||||
**Option 1:**
|
||||
"Open a terminal and execute the following command to create a new custom role."
|
||||
|
||||
**Option 2:**
|
||||
"To create a new custom role, open a terminal and execute the following command."
|
||||
|
||||
### SEO Optimization
|
||||
|
||||
* Search engines prioritize clear intent at the beginning of a sentence.
|
||||
* Option 2 starts with the action users are likely to search for (e.g., "Create a custom role"), which improves SEO rankings and makes the content more likely to match search queries.
|
||||
* Option 1 places the primary search term toward the end, making it less effective for keyword optimization.
|
||||
|
||||
### Technical Writing Best Practices
|
||||
|
||||
* Technical writing emphasizes clear objectives first, followed by actions.
|
||||
* Option 2 follows this best practice by stating the goal first ("To create a new custom role") and then providing instructions.
|
||||
* Option 1 is still acceptable for step-by-step guides, but Option 2 is more effective for tutorials, manuals, and documentation.
|
||||
|
||||
### Key Takeaways
|
||||
|
||||
* Draft trying to mimic the most likely way users are to find the information (“Ctrl + F approach”).
|
||||
* Place keywords and key terms at the beginning of sentences so that they rank better SEO-wise.
|
||||
* Rule of thumb: “In order to what” precedes the “what”. “What” must mirror the user’s most likely way of drafting or searching.
|
||||
|
||||
---
|
||||
|
||||
## Section Titles and Headers in Technical Writing
|
||||
|
||||
Effective headers and section titles enhance document readability and structure, making content more accessible to the reader. This chapter outlines best practices for crafting clear, consistent, and meaningful headings.
|
||||
|
||||
1. **Purpose of Headers**
|
||||
Headers serve several key functions:
|
||||
* **Improve Navigation:** Allow users to quickly locate relevant information.
|
||||
* **Enhance Readability:** Break down complex topics into manageable sections.
|
||||
* **Establish Hierarchy:** Define the logical flow of content.
|
||||
* **SEO:** Headers impact SEO both directly and indirectly:
|
||||
* Search engines use headings to determine the hierarchy and relevance of content.
|
||||
* **H1:** The primary heading (should be unique and descriptive).
|
||||
* **H2-H6:** Subheadings that break down content logically.
|
||||
* Best practices for SEO-friendly headers:
|
||||
* Include keywords naturally in headings.
|
||||
* Avoid keyword stuffing—keep it clear and readable.
|
||||
* Use structured hierarchy (H1 → H2 → H3, etc.).
|
||||
2. **Header Levels and Formatting**
|
||||
Use a structured approach for organizing section titles. Common conventions include:
|
||||
* **Title:** The primary heading of the document (e.g., H1).
|
||||
* **Main Sections:** First-level headers (H2), introducing key content areas.
|
||||
* **Subsections:** Second-level headers (H3) to detail specific topics within sections.
|
||||
* **Subtopics:** Third-level headers (H4+) used sparingly for finer details.
|
||||
|
||||
**Example:**
|
||||
|
||||
```markdown
|
||||
# Document Title (H1)
|
||||
## Main Section (H2)
|
||||
### Subsection (H3)
|
||||
#### Subtopic (H4)
|
||||
```
|
||||
|
||||
3. **Writing Effective Headers**
|
||||
When crafting headers and section titles, follow these guidelines:
|
||||
* **Be Descriptive:** Clearly indicate what the section covers.
|
||||
* **Poor:** Introduction (too vague)
|
||||
* **Good:** Introduction to AWS CloudShell Installation (informative)
|
||||
* **Keep It Concise:** Use precise language without unnecessary words.
|
||||
* **Maintain Consistency:** Apply uniform formatting and style conventions throughout.
|
||||
* **Avoid Special Characters:** Limit punctuation for clarity—avoid excessive symbols, dashes, or underscores.
|
||||
4. **Capitalization Rules**
|
||||
Use Title Case for headers to ensure a professional look:
|
||||
* **Good:** How to Clone and Install Prowler from GitHub
|
||||
* **Poor:** How to clone and install Prowler from GitHub
|
||||
|
||||
For technical documentation, sentence case may be used for readability in subheadings. Please note this differs from headers and it is only a recommendation, but consistency is to be kept throughout the documentation:
|
||||
|
||||
* **Example:**
|
||||
* How to Clone and Install Prowler from GitHub (header: Title case)
|
||||
* How to install poetry dependencies (subheading: Sentence case)
|
||||
5. **Using Keywords in Headers**
|
||||
Headers should include relevant keywords to improve document searchability:
|
||||
* **Good:** Scanning AWS Accounts in Parallel
|
||||
* **Poor:** Ways to scan on AWS (vague and imprecise)
|
||||
6. **Consistency Across Documents**
|
||||
Ensure uniformity in section titles across related documentation:
|
||||
* **Standardized Header Naming:** Use consistent wording for common sections (e.g., "Installation," "Setup," "Configuration").
|
||||
* **Numbering Sections (If Necessary):** For structured guides, include numbering where appropriate (e.g., "Step 1: Install Prowler").
|
||||
|
||||
---
|
||||
|
||||
## Avoid Assumptions Regarding Audience’s Expertise
|
||||
|
||||
### Understand Your Audience’s Expertise
|
||||
|
||||
Even when the target audience is known, assumptions about its expertise or knowledge are to be avoided.
|
||||
|
||||
Adjust the level of detail based on expected reader proficiency, but make sure to be as explanatory as humanly possible.
|
||||
|
||||
### Define Key Terms and Acronyms on First Use
|
||||
|
||||
Even if your audience is technical, some domain-specific terms may vary.
|
||||
* Introduce jargon only after defining it clearly.
|
||||
* If using acronyms (e.g., IAM, MFA), spell them out on first mention:
|
||||
* AWS Identity and Access Management (IAM)
|
||||
* Multifactor Authentication (MFA)
|
||||
|
||||
### Don’t Assume Unwritten Knowledge
|
||||
|
||||
Even experienced readers may not know every prerequisite. If a process relies on prior steps, briefly reference them:
|
||||
|
||||
* Before configuring security groups, ensure VPC networking is set up.
|
||||
|
||||
### Use Consistent Formatting
|
||||
|
||||
### Provide as Many Examples as Deemed Right… and Then Some
|
||||
|
||||
### Anticipate Common Knowledge Gaps
|
||||
|
||||
### Avoid Excessive Notes
|
||||
|
||||
Notes are often omitted by readers and they clutter text, so use them sparingly and only for additional information that is not essential or prompts any error or mistake.
|
||||
|
||||
---
|
||||
|
||||
## Using Warnings and Danger Calls for High-Severity Information
|
||||
|
||||
In technical documentation, warnings and danger calls highlight critical risks, guiding users in preventing security breaches or system failures. Proper usage ensures clarity and actionable guidance.
|
||||
|
||||
1. **Define Severity Levels**
|
||||
Before applying Note, Warning, or Danger, clearly define their significance:
|
||||
* **Note:** Provides general information or best practices (low severity).
|
||||
* **Warning:** Indicates potential issues if instructions are not followed (moderate severity).
|
||||
* **Danger:** Highlights actions that could result in severe consequences, such as system corruption or data loss (high severity).
|
||||
2. **Explain Consequences**
|
||||
Each warning or danger call should explicitly describe the impact of ignoring the caution:
|
||||
* **Good:** Disabling encryption may expose sensitive data to unauthorized access.
|
||||
* **Poor:** Avoid disabling encryption.
|
||||
3. **Provide Remediation and Troubleshooting**
|
||||
Whenever possible, direct users to troubleshooting guides or mitigation steps to resolve the issue.
|
||||
|
||||
**Example:**
|
||||
**Danger:** Running this command will **permanently delete all data**. Refer to @Data Recovery Guide for restoration steps.
|
||||
@@ -279,4 +279,4 @@ You can filter scans to specific organizations or projects:
|
||||
prowler mongodbatlas --atlas-project-id <project_id>
|
||||
```
|
||||
|
||||
See more details about MongoDB Atlas Authentication in [Requirements](../getting-started/requirements.md#mongodb-atlas)
|
||||
See more details about MongoDB Atlas Authentication in [MongoDB Atlas Authentication](../tutorials/mongodbatlas/authentication.md)
|
||||
|
||||
@@ -0,0 +1,213 @@
|
||||
# Check Metadata Guidelines
|
||||
|
||||
## Introduction
|
||||
|
||||
This guide provides comprehensive guidelines for creating check metadata in Prowler. For basic information on check metadata structure, refer to the [check metadata](./checks.md#metadata-structure-for-prowler-checks) section.
|
||||
|
||||
## Check Title Guidelines
|
||||
|
||||
### Writing Guidelines
|
||||
|
||||
1. **Determine Resource Finding Scope (Singular vs. Plural)**:
|
||||
When determining whether to use singular or plural in the check title, examine the code for certain patterns. If the code contains a loop that generates an individual report for each resource, use the singular form. If the code produces a single report that covers all resources collectively, use the plural form. For organization- or account-wide checks, select the scope that best matches the breadth of the evaluation. Additionally, review the `status_extended` field messages in the code, as they often provide clues about whether the check is scoped to individual resources or to groups of resources.
|
||||
Analyze the detection code to determine if the check reports on individual resources or aggregated resources:
|
||||
- **Singular**: Use when the check creates one report per resource (e.g., "EC2 instance has IMDSv2 enforced", "S3 bucket does not allow public write access").
|
||||
- **Plural**: Use when the check creates one report for all resources together (e.g., "All EC2 instances have IMDSv2 enforced", "S3 buckets do not allow public write access").
|
||||
2. **Describe the Compliant (*PASS*) State**:
|
||||
Always write the title to describe the **desired, compliant state** of the resources. The title should reflect what it looks like when the audited resource is following the check's requirements.
|
||||
3. **Be Specific and Factual**:
|
||||
Include the exact secure configuration being verified. Avoid vague or generic terms like "properly configured".
|
||||
4. **Avoid Redundant or Action Words**:
|
||||
Do not include verbs like "Check", "Verify", "Ensure", or "Monitor". The title is a declarative statement of the secure condition.
|
||||
5. **Length Limit**:
|
||||
Keep the title under 150 characters.
|
||||
|
||||
### Common Mistakes to Avoid
|
||||
|
||||
- Starting with verbs like "Check", "Verify", "Ensure", "Make sure". Always start with the affected resource instead.
|
||||
- Being too vague or generic (e.g., "Ensure security groups are properly configured", what does it mean? "properly configured" is not a clear description of the compliant state).
|
||||
- Focusing on the non-compliant state instead of the compliant state.
|
||||
- Using unclear scope and resource identification.
|
||||
|
||||
## Check Type Guidelines (AWS Only)
|
||||
|
||||
### AWS Security Hub Type Format
|
||||
|
||||
AWS Security Hub uses a three-part type taxonomy:
|
||||
|
||||
- **Namespace**: The top-level security domain.
|
||||
- **Category**: The security control family or area.
|
||||
- **Classifier**: The specific security concern (optional).
|
||||
|
||||
A partial path may be defined (e.g., `TTPs` or `TTPs/Defense Evasion` are valid).
|
||||
|
||||
### Selection Guidelines
|
||||
|
||||
1. **Be Specific**: Use the most specific classifier that accurately describes the check.
|
||||
2. **Standard Compliance**: Consider if the check relates to specific compliance standards.
|
||||
3. **Multiple Types**: You can specify multiple types if the check addresses multiple concerns.
|
||||
|
||||
## Description Guidelines
|
||||
|
||||
### Writing Guidelines
|
||||
|
||||
1. **Focus on the Finding**: All fields should address how the finding affects the security posture, rather than the control itself.
|
||||
2. **Use Natural Language**: Write in simple, clear paragraphs with complete, grammatically correct sentences.
|
||||
3. **Use Markdown Formatting**: Enhance readability with:
|
||||
- Use **bold** for emphasis on key security concepts.
|
||||
- Use *italic* for a secondary emphasis. Use it for clarifications, conditions, or optional notes. But don't abuse it.
|
||||
- Use `code` formatting for specific configuration values, or technical details. Don't use it for service names or common technical terms.
|
||||
- Use one or two line breaks (`\n` or `\n\n`) to separate distinct ideas.
|
||||
- Use bullet points (`-`) for listing multiple concepts or actions.
|
||||
- Use numbers for listing steps or sequential actions.
|
||||
4. **Be Concise**: Maximum 400 characters (spaces count). Every word should add value.
|
||||
5. **Explain What the Finding Means**: Focus on what the security control evaluates and what it means when it passes or fails, but without explicitly stating the pass or fail state.
|
||||
6. **Be Technical but Clear**: Use appropriate technical terminology while remaining understandable.
|
||||
7. **Avoid Risk Descriptions**: Do not describe potential risks, threats, or consequences.
|
||||
8. **CheckTitle and Description can be the same**: If the check is very simple and the title is already clear, you can use the same text for the description.
|
||||
|
||||
### Common Mistakes to Avoid
|
||||
|
||||
- **Technical Implementation Details**: "The control loops through all instances and calls the describe_instances API...".
|
||||
- **Vague Descriptions**: "This control verifies proper configuration of resources". What does it mean? "proper configuration" is not a clear description of the compliant state.
|
||||
- **Risk Descriptions**: "This could lead to data breaches" or "This poses a security threat".
|
||||
- **Starting with Verbs**: "Check if...", "Verify...", "Ensure...". Always start with the affected resource instead.
|
||||
- **References to Pass/Fail States**: Avoid using words like "pass" or "fail".
|
||||
|
||||
## Risk Guidelines
|
||||
|
||||
### Writing Guidelines
|
||||
|
||||
1. **Explain the Cybersecurity Impact**: Focus on how the finding affects confidentiality, integrity, or availability (CIA triad). If the CIA triad does not apply, explain the risk in terms of the organization's business objectives.
|
||||
2. **Be Specific About Threats**: Clearly state what could happen if this security control is not in place. What attacks or incidents become possible?
|
||||
3. **Focus on Risk Context**: Explain the specific security implications of the finding, not just generic security risks.
|
||||
4. **Use Markdown Formatting**: Enhance readability with markdown formatting:
|
||||
- Use **bold** for emphasis on key security concepts.
|
||||
- Use *italic* for a secondary emphasis. Use it for clarifications, conditions, or optional notes. But don't abuse it.
|
||||
- Use `code` formatting for specific configuration values, or technical details. Don't use it for service names or common technical terms.
|
||||
- Use one or two line breaks (`\n` or `\n\n`) to separate distinct ideas.
|
||||
- Use bullet points (`-`) for listing multiple concepts or actions.
|
||||
- Use numbers for listing steps or sequential actions.
|
||||
5. **Be Concise**: Maximum 400 characters. Make every word count.
|
||||
|
||||
### Common Mistakes to Avoid
|
||||
|
||||
- **Generic Risks**: "This could lead to security issues" or "Regulatory compliance violations".
|
||||
- **Technical Implementation Focus**: "The API call might fail and return incorrect results...".
|
||||
- **Overly Broad Statements**: "This is a serious security risk that could impact everything".
|
||||
- **Vague Threats**: "This could be exploited by threat actors" without explaining how.
|
||||
|
||||
## Recommendation Guidelines
|
||||
|
||||
### Writing Guidelines
|
||||
|
||||
1. **Provide Actionable Best Practice Guidance**: Explain what should be done to maintain security posture. Focus on preventive measures and proactive security practices.
|
||||
2. **Be Principle-Based**: Reference established security principles (least privilege, defense in depth, zero trust, separation of duties) where applicable.
|
||||
3. **Focus on Prevention**: Explain best practices that prevent the security issue from occurring, not just detection or remediation.
|
||||
4. **Use Markdown Formatting**: Enhance readability with markdown formatting:
|
||||
- Use **bold** for emphasis on key security concepts.
|
||||
- Use *italic* for a secondary emphasis. Use it for clarifications, conditions, or optional notes. But don't abuse it.
|
||||
- Use `code` formatting for specific configuration values, or technical details. Don't use it for service names or common technical terms.
|
||||
- Use one or two line breaks (`\n` or `\n\n`) to separate distinct ideas.
|
||||
- Use bullet points (`-`) for listing multiple concepts or actions.
|
||||
- Use numbers for listing steps or sequential actions.
|
||||
5. **Be Concise**: Maximum 400 characters.
|
||||
|
||||
### Common Mistakes to Avoid
|
||||
|
||||
- **Specific Remediation Steps**: "1. Go to the console\n2. Click on settings..." - Focus on principles, not click-by-click instructions.
|
||||
- **Implementation Details**: "Configure the JSON policy with the following IAM actions..." - Explain what to achieve, not how.
|
||||
- **Vague Guidance**: "Follow security best practices..." without explaining what those practices are.
|
||||
- **Resource-Specific Recommendations**: "Enable MFA on user john.doe@example.com" - Keep it general.
|
||||
- **Missing Context**: Not explaining why the best practice is important for security.
|
||||
|
||||
### Good Examples
|
||||
|
||||
- *"Avoid exposing sensitive resources directly to the Internet; configure access controls to limit exposure."*
|
||||
- *"Apply the principle of least privilege when assigning permissions to users and services."*
|
||||
- *"Regularly review and update your security configurations to align with current best practices."*
|
||||
|
||||
## Remediation Code Guidelines
|
||||
|
||||
### Critical Requirement
|
||||
|
||||
The **fundamental principle** is to focus on the **specific change** that converts the finding from non-compliant to compliant.
|
||||
|
||||
Also, it is important to keep all code examples as short as possible, including only the essential code to fix the issue. Remove any extra configuration, optional parameters, or nice-to-have settings, and add comments to explain the code when possible.
|
||||
|
||||
### Common Guidelines for All Code Fields
|
||||
|
||||
1. **Be Minimal**: Keep code blocks as short as possible - only include what is absolutely necessary.
|
||||
2. **Focus on the Fix**: Remove any extra configuration, optional parameters, or nice-to-have settings.
|
||||
3. **Be Accurate**: Ensure all commands and code are syntactically correct.
|
||||
4. **Use Markdown Formatting**: Format code properly using code blocks and appropriate syntax highlighting.
|
||||
5. **Follow Best Practices**: Use the most secure and recommended approaches for each platform.
|
||||
|
||||
### CLI Guidelines
|
||||
|
||||
- Only provide a single command that directly changes the finding from fail to pass.
|
||||
- The command must be executable as-is and resolve the security issue completely.
|
||||
- Use proper command syntax for the provider (AWS CLI, Azure CLI, gcloud, kubectl, etc.).
|
||||
- Do not use markdown formatting or code blocks - just the raw command.
|
||||
- Do not include multiple commands, comments, or explanations.
|
||||
- If the issue cannot be resolved with a single command, leave this field empty.
|
||||
|
||||
### Native IaC Guidelines
|
||||
|
||||
- **Keep It Minimal**: Only include the specific resource/configuration that fixes the security issue.
|
||||
- Format as markdown code blocks with proper syntax highlighting.
|
||||
- Include only the required properties to fix the issue.
|
||||
- Add comments indicating the critical line(s) that remediate the check.
|
||||
- Use `example_resource` as the generic name for all resources and IDs.
|
||||
|
||||
### Terraform Guidelines
|
||||
|
||||
- **Keep It Minimal**: Only include the specific resource/configuration that fixes the security issue.
|
||||
- Provide valid HCL (HashiCorp Configuration Language) code with an example of a compliant configuration.
|
||||
- Use the latest Terraform syntax and provider versions.
|
||||
- Include only the required arguments to fix the issue - skip optional parameters.
|
||||
- Format as markdown code blocks with `hcl` syntax highlighting.
|
||||
- Add comments indicating the critical line(s) that remediate the check.
|
||||
- Use `example_resource` as the generic name for all resources and IDs.
|
||||
- Skip provider requirements unless critical for the fix.
|
||||
|
||||
### Other (Manual Steps) Guidelines
|
||||
|
||||
- **Keep It Minimal**: Only include the exact steps needed to fix the security issue.
|
||||
- Provide step-by-step instructions for manual remediation through web interfaces.
|
||||
- Use numbered lists for sequential steps.
|
||||
- Be specific about menu locations, button names, and settings.
|
||||
- Skip optional configurations or nice-to-have settings.
|
||||
- Format using markdown for better readability.
|
||||
|
||||
## Categories Guidelines
|
||||
|
||||
### Selection Guidelines
|
||||
|
||||
1. **Be Specific**: Only select categories that directly relate to what the automated control evaluates.
|
||||
2. **Primary Focus**: Consider the primary security concern the automated control addresses.
|
||||
3. **Avoid Over-Categorization**: Do not select categories just because they are tangentially related.
|
||||
|
||||
### Available Categories
|
||||
|
||||
| Category | Definition |
|
||||
|-------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| encryption | Ensures data is encrypted in transit and/or at rest, including key management practices |
|
||||
| internet-exposed | Checks that limit or flag public access to services, APIs, or assets from the Internet |
|
||||
| logging | Ensures appropriate logging of events, activities, and system interactions for traceability |
|
||||
| secrets | Manages and protects credentials, API keys, tokens, and other sensitive information |
|
||||
| resilience | Ensures systems can maintain availability and recover from disruptions, failures, or degradation. Includes redundancy, fault-tolerance, auto-scaling, backup, disaster recovery, and failover strategies |
|
||||
| threat-detection | Identifies suspicious activity or behaviors using IDS, malware scanning, or anomaly detection |
|
||||
| trust-boundaries | Enforces isolation or segmentation between different trust levels (e.g., VPCs, tenants, network zones) |
|
||||
| vulnerabilities | Detects or remediates known software, infrastructure, or config vulnerabilities (e.g., CVEs) |
|
||||
| cluster-security | Secures Kubernetes cluster components such as API server, etcd, and role-based access |
|
||||
| container-security | Ensures container images and runtimes follow security best practices |
|
||||
| node-security | Secures nodes running containers or services |
|
||||
| gen-ai | Checks related to safe and secure use of generative AI services or models |
|
||||
| ci-cd | Ensures secure configurations in CI/CD pipelines |
|
||||
| identity-access | Governs user and service identities, including least privilege, MFA, and permission boundaries |
|
||||
| email-security | Ensures detection and protection against phishing, spam, spoofing, etc. |
|
||||
| forensics-ready | Ensures systems are instrumented to support post-incident investigations. Any digital trace or evidence (logs, volume snapshots, memory dumps, network captures, etc.) preserved immutably and accompanied by integrity guarantees, which can be used in a forensic analysis |
|
||||
| software-supply-chain | Detects or prevents tampering, unauthorized packages, or third-party risks in software supply chain |
|
||||
| e3 | M365-specific controls enabled by or dependent on an E3 license (e.g., baseline security policies, conditional access) |
|
||||
| e5 | M365-specific controls enabled by or dependent on an E5 license (e.g., advanced threat protection, audit, DLP, and eDiscovery) |
|
||||
@@ -40,7 +40,7 @@ Each check in Prowler follows a straightforward structure. Within the newly crea
|
||||
|
||||
- `__init__.py` (empty file) – Ensures Python treats the check folder as a package.
|
||||
- `<check_name>.py` (code file) – Contains the check logic, following the prescribed format. Please refer to the [prowler's check code structure](./checks.md#prowlers-check-code-structure) for more information.
|
||||
- `<check_name>.metadata.json` (metadata file) – Defines the check's metadata for contextual information. Please refer to the [check metadata](./checks.md#) for more information.
|
||||
- `<check_name>.metadata.json` (metadata file) – Defines the check's metadata for contextual information. Please refer to the [check metadata](./checks.md#metadata-structure-for-prowler-checks) for more information.
|
||||
|
||||
## Prowler's Check Code Structure
|
||||
|
||||
@@ -226,68 +226,148 @@ Below is a generic example of a check metadata file. **Do not include comments i
|
||||
```json
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "example_check_id",
|
||||
"CheckTitle": "Example Check Title",
|
||||
"CheckType": ["Infrastructure Security"],
|
||||
"ServiceName": "ec2",
|
||||
"SubServiceName": "ami",
|
||||
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
|
||||
"Severity": "critical",
|
||||
"CheckID": "service_resource_security_setting",
|
||||
"CheckTitle": "Service resource has security setting enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "service",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Other",
|
||||
"Description": "Example description of the check.",
|
||||
"Risk": "Example risk if the check fails.",
|
||||
"RelatedUrl": "https://example.com",
|
||||
"Description": "This check verifies that the service resource has the required **security setting** enabled to protect against potential vulnerabilities.\n\nIt ensures that the resource follows security best practices and maintains proper access controls. The check evaluates whether the security configuration is properly implemented and active.",
|
||||
"Risk": "Without proper security settings, the resource may be vulnerable to:\n\n- **Unauthorized access** - Malicious actors could gain entry\n- **Data breaches** - Sensitive information could be compromised\n- **Security threats** - Various attack vectors could be exploited\n\nThis could result in compliance violations and potential financial or reputational damage.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": ["https://example.com/security-documentation", "https://example.com/best-practices"],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "example CLI command",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
"CLI": "provider-cli service enable-security-setting --resource-id resource-123",
|
||||
"NativeIaC": "```yaml\nType: Provider::Service::Resource\nProperties:\n SecuritySetting: enabled\n ResourceId: resource-123\n```",
|
||||
"Other": "1. Open the provider management console\n2. Navigate to the service section\n3. Select the resource\n4. Enable the security setting\n5. Save the configuration",
|
||||
"Terraform": "```hcl\nresource \"provider_service_resource\" \"example\" {\n resource_id = \"resource-123\"\n security_setting = true\n}\n```"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Example recommendation text.",
|
||||
"Url": "https://example.com/remediation"
|
||||
"Text": "Enable security settings on all service resources to ensure proper protection. Regularly review and update security configurations to align with current best practices.",
|
||||
"Url": "https://hub.prowler.com/check/service_resource_security_setting"
|
||||
}
|
||||
},
|
||||
"Categories": ["example-category"],
|
||||
"Categories": ["internet-exposed", "secrets"],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
"RelatedTo": ["service_resource_security_setting", "service_resource_security_setting_2"],
|
||||
"Notes": "This is a generic example check that should be customized for specific provider and service requirements."
|
||||
}
|
||||
```
|
||||
|
||||
### Metadata Fields and Their Purpose
|
||||
|
||||
- **Provider** — The Prowler provider related to the check. The name **must** be lowercase and match the provider folder name. For supported providers refer to [Prowler Hub](https://hub.prowler.com/check) or directly to [Prowler Code](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers).
|
||||
- **CheckID** — The unique identifier for the check inside the provider, this field **must** match the check's folder and python file and json metadata file name. For more information about the naming refer to the [Naming Format for Checks](#naming-format-for-checks) section.
|
||||
- **CheckTitle** — A concise, descriptive title for the check.
|
||||
- **CheckType** — *For now this field is only standardized for the AWS provider*.
|
||||
    - For AWS this field must follow the [AWS Security Hub Types](https://docs.aws.amazon.com/securityhub/latest/userguide/asff-required-attributes.html#Types) format. The common pattern to follow is `namespace/category/classifier`; refer to the linked documentation for the valid values for these fields.
|
||||
- **ServiceName** — The name of the provider service being audited. This field **must** be in lowercase and match with the service folder name. For supported services refer to [Prowler Hub](https://hub.prowler.com/check) or directly to [Prowler Code](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers).
|
||||
- **SubServiceName** — The subservice or resource within the service, if applicable. For more information refer to the [Naming Format for Checks](#naming-format-for-checks) section.
|
||||
- **ResourceIdTemplate** — A template for the unique resource identifier. For more information refer to the [Resource Identification in Prowler](#resource-identification-in-prowler) section.
|
||||
- **Severity** — The severity of the finding if the check fails. Must be one of: `critical`, `high`, `medium`, `low`, or `informational`, this field **must** be in lowercase. To get more information about the severity levels refer to the [Prowler's Check Severity Levels](#prowlers-check-severity-levels) section.
|
||||
- **ResourceType** — The type of resource being audited. *For now this field is only standardized for the AWS provider*.
|
||||
- For AWS use the [Security Hub resource types](https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html) or, if not available, the PascalCase version of the [CloudFormation type](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) (e.g., `AwsEc2Instance`). Use "Other" if no match exists.
|
||||
- **Description** — A short description of what the check does.
|
||||
- **Risk** — The risk or impact if the check fails, explaining why the finding matters.
|
||||
- **RelatedUrl** — A URL to official documentation or further reading about the check's purpose. If no official documentation is available, use the risk and recommendation text from trusted third-party sources.
|
||||
- **Remediation** — Guidance for fixing a failed check, including:
|
||||
- **Code** — Remediation commands or code snippets for CLI, Terraform, native IaC, or other tools like the Web Console.
|
||||
- **Recommendation** — A textual human readable recommendation. Here it is not necessary to include actual steps, but rather a general recommendation about what to do to fix the check.
|
||||
- **Categories** — One or more categories for grouping checks in execution (e.g., `internet-exposed`). For the current list of categories, refer to the [Prowler Hub](https://hub.prowler.com/check).
|
||||
- **DependsOn** — Currently not used.
|
||||
- **RelatedTo** — Currently not used.
|
||||
- **Notes** — Any additional information not covered by other fields.
|
||||
#### Provider
|
||||
|
||||
### Remediation Code Guidelines
|
||||
The Prowler provider related to the check. The name **must** be lowercase and match the provider folder name. For supported providers refer to [Prowler Hub](https://hub.prowler.com/check) or directly to [Prowler Code](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers).
|
||||
|
||||
When providing remediation steps, reference the following sources:
|
||||
#### CheckID
|
||||
|
||||
- Official provider documentation.
|
||||
- [Prowler Checks Remediation Index](https://docs.prowler.com/checks/checks-index)
|
||||
- [TrendMicro Cloud One Conformity](https://www.trendmicro.com/cloudoneconformity)
|
||||
- [CloudMatos Remediation Repository](https://github.com/cloudmatos/matos/tree/master/remediations)
|
||||
The unique identifier for the check inside the provider. This field **must** match the check's folder, Python file, and JSON metadata file name. For more information about naming, refer to the [Naming Format for Checks](#naming-format-for-checks) section.
|
||||
|
||||
#### CheckTitle
|
||||
|
||||
The `CheckTitle` field must be plain text and must clearly and succinctly define **the best practice being evaluated and which resource(s) each finding applies to**. The title should be specific, concise (no more than 150 characters), and reference the relevant resource(s) involved.
|
||||
|
||||
**Always write the `CheckTitle` to describe the *PASS* case**, the desired secure or compliant state of the resource(s). This helps ensure that findings are easy to interpret and that the title always reflects the best practice being met.
|
||||
|
||||
For detailed guidelines on writing effective check titles, including how to determine singular vs. plural scope and common mistakes to avoid, see [Check Title Guidelines](./check-metadata-guidelines.md#check-title-guidelines).
|
||||
|
||||
#### CheckType
|
||||
|
||||
???+ warning
|
||||
This field is only applicable to the AWS provider.
|
||||
|
||||
It follows the [AWS Security Hub Types](https://docs.aws.amazon.com/securityhub/latest/userguide/asff-required-attributes.html#Types) format using the pattern `namespace/category/classifier`.
|
||||
|
||||
For the complete AWS Security Hub selection guidelines, see [Check Type Guidelines](./check-metadata-guidelines.md#check-type-guidelines-aws-only).
|
||||
|
||||
#### ServiceName
|
||||
|
||||
The name of the provider service being audited. Must be lowercase and match the service folder name. For supported services refer to [Prowler Hub](https://hub.prowler.com/check) or the [Prowler Code](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers).
|
||||
|
||||
#### SubServiceName
|
||||
|
||||
This field is in the process of being deprecated and should be **left empty**.
|
||||
|
||||
#### ResourceIdTemplate
|
||||
|
||||
This field is in the process of being deprecated and should be **left empty**.
|
||||
|
||||
#### Severity
|
||||
|
||||
Severity level if the check fails. Must be one of: `critical`, `high`, `medium`, `low`, or `informational`, and written in lowercase. See [Prowler's Check Severity Levels](#prowlers-check-severity-levels) for details.
|
||||
|
||||
#### ResourceType
|
||||
|
||||
The type of resource being audited. This field helps categorize and organize findings by resource type for better analysis and reporting. For each provider:
|
||||
|
||||
- **AWS**: Use [Security Hub resource types](https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html) or PascalCase CloudFormation types removing the `::` separator used in CloudFormation templates (e.g., in CloudFormation template the type of an EC2 instance is `AWS::EC2::Instance` but in the check it should be `AwsEc2Instance`). Use `Other` if none apply.
|
||||
- **Azure**: Use types from [Azure Resource Graph](https://learn.microsoft.com/en-us/azure/governance/resource-graph/reference/supported-tables-resources), for example: `Microsoft.Storage/storageAccounts`.
|
||||
- **Google Cloud**: Use [Cloud Asset Inventory asset types](https://cloud.google.com/asset-inventory/docs/asset-types), for example: `compute.googleapis.com/Instance`.
|
||||
- **Kubernetes**: Use types shown under `KIND` from `kubectl api-resources`.
|
||||
- **M365 / GitHub**: Leave empty due to lack of standardized types.
|
||||
|
||||
#### Description
|
||||
|
||||
A concise, natural language explanation that **clearly describes what the finding means**, focusing on clarity and context rather than technical implementation details. Use simple paragraphs with line breaks if needed, but avoid sections, code blocks, or complex formatting. This field is limited to maximum 400 characters.
|
||||
|
||||
For detailed writing guidelines and common mistakes to avoid, see [Description Guidelines](./check-metadata-guidelines.md#description-guidelines).
|
||||
|
||||
#### Risk
|
||||
|
||||
A clear, natural language explanation of **why this finding poses a cybersecurity risk**. Focus on how it may impact confidentiality, integrity, or availability. If those do not apply, describe any relevant operational or financial risks. Use simple paragraphs with line breaks if needed, but avoid sections, code blocks, or complex formatting. Limit your explanation to 400 characters.
|
||||
|
||||
For detailed writing guidelines and common mistakes to avoid, see [Risk Guidelines](./check-metadata-guidelines.md#risk-guidelines).
|
||||
|
||||
#### RelatedUrl
|
||||
|
||||
*Deprecated*. Use `AdditionalURLs` for adding your URL references.
|
||||
|
||||
#### AdditionalURLs
|
||||
|
||||
???+ warning
|
||||
URLs must be valid and not repeated.
|
||||
|
||||
A list of official documentation URLs for further reading. These should be authoritative sources that provide additional context, best practices, or detailed information about the security control being checked. Prefer official provider documentation, security standards, or well-established security resources. Avoid third-party blogs or unofficial sources unless they are highly reputable and directly relevant.
|
||||
|
||||
#### Remediation
|
||||
|
||||
Provides both code examples and best practice recommendations for addressing the security issue.
|
||||
|
||||
- **Code**: Contains remediation examples in different formats:
|
||||
- **CLI**: Command-line interface commands to make the finding compliant in runtime.
|
||||
- **NativeIaC**: Native Infrastructure as Code templates with an example of a compliant configuration. For now it applies to:
|
||||
- **AWS**: CloudFormation YAML formatted code (do not use JSON format).
|
||||
- **Azure**: Bicep formatted code (do not use ARM templates).
|
||||
- **Terraform**: HashiCorp Configuration Language (HCL) code with an example of a compliant configuration.
|
||||
- **Other**: Manual steps through web interfaces or other tools to make the finding compliant.
|
||||
|
||||
For detailed guidelines on writing remediation code, see [Remediation Code Guidelines](./check-metadata-guidelines.md#remediation-code-guidelines).
|
||||
|
||||
- **Recommendation**
|
||||
- **Text**: Generic best practice guidance in natural language using Markdown format (maximum 400 characters). For writing guidelines, see [Recommendation Guidelines](./check-metadata-guidelines.md#recommendation-guidelines).
|
||||
    - **Url**: [Prowler Hub URL](https://hub.prowler.com/) of the check. This URL always follows the pattern `https://hub.prowler.com/check/<check_id>`.
|
||||
|
||||
#### Categories
|
||||
|
||||
One or more functional groupings used for execution filtering (e.g., `internet-exposed`). You can define new categories just by adding to this field.
|
||||
|
||||
For the complete list of available categories, see [Categories Guidelines](./check-metadata-guidelines.md#categories-guidelines).
|
||||
|
||||
#### DependsOn
|
||||
|
||||
List of IDs of checks that, when compliant, make this check compliant as well or prevent it from producing any finding.
|
||||
|
||||
#### RelatedTo
|
||||
|
||||
List of check IDs of checks that are conceptually related, even if they do not share a technical dependency.
|
||||
|
||||
#### Notes
|
||||
|
||||
Any additional information not covered in the above fields.
|
||||
|
||||
### Python Model Reference
|
||||
|
||||
|
||||
@@ -0,0 +1,102 @@
|
||||
# LLM Provider
|
||||
|
||||
This page details the [Large Language Model (LLM)](https://en.wikipedia.org/wiki/Large_language_model) provider implementation in Prowler.
|
||||
|
||||
The LLM provider enables security testing of language models using red team techniques. By default, Prowler uses the built-in LLM configuration that targets OpenAI models with comprehensive security test suites. To configure it, follow the [LLM getting started guide](../tutorials/llm/getting-started-llm.md).
|
||||
|
||||
## LLM Provider Classes Architecture
|
||||
|
||||
The LLM provider implementation follows the general [Provider structure](./provider.md). This section focuses on the LLM-specific implementation, highlighting how the generic provider concepts are realized for LLM security testing in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](./provider.md).
|
||||
|
||||
### Main Class
|
||||
|
||||
- **Location:** [`prowler/providers/llm/llm_provider.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/llm/llm_provider.py)
|
||||
- **Base Class:** Inherits from `Provider` (see [base class details](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/common/provider.py)).
|
||||
- **Purpose:** Central orchestrator for LLM-specific logic, configuration management, and integration with promptfoo for red team testing.
|
||||
- **Key LLM Responsibilities:**
|
||||
- Initializes and manages LLM configuration using promptfoo.
|
||||
- Validates configuration and sets up the LLM testing context.
|
||||
- Loads and manages red team test configuration, plugins, and target models.
|
||||
- Provides properties and methods for downstream LLM security testing.
|
||||
- Integrates with promptfoo for comprehensive LLM security evaluation.
|
||||
|
||||
### Data Models
|
||||
|
||||
- **Location:** [`prowler/providers/llm/models.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/llm/models.py)
|
||||
- **Purpose:** Define structured data for LLM output options and configuration.
|
||||
- **Key LLM Models:**
|
||||
- `LLMOutputOptions`: Customizes output filename logic for LLM-specific reporting.
|
||||
|
||||
### LLM Security Testing Integration
|
||||
|
||||
- **Location:** [`prowler/providers/llm/llm_provider.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/llm/llm_provider.py)
|
||||
- **Purpose:** Integrates with promptfoo for comprehensive LLM security testing.
|
||||
- **Key LLM Responsibilities:**
|
||||
- Executes promptfoo red team evaluations against target LLMs.
|
||||
- Processes security test results and converts them to Prowler reports.
|
||||
- Manages test concurrency and progress tracking.
|
||||
- Handles real-time streaming of test results.
|
||||
|
||||
### Configuration Management
|
||||
|
||||
The LLM provider uses promptfoo configuration files to define:
|
||||
|
||||
- **Target Models**: The LLM models to test (e.g., OpenAI GPT, Anthropic Claude)
|
||||
- **Red Team Plugins**: Security test suites (OWASP, MITRE, NIST, EU AI Act)
|
||||
- **Test Parameters**: Concurrency, test counts, and evaluation criteria
|
||||
|
||||
### Default Configuration
|
||||
|
||||
Prowler includes a comprehensive default LLM configuration that:
|
||||
|
||||
- Targets OpenAI models by default
|
||||
- Includes multiple security test frameworks (OWASP, MITRE, NIST, EU AI Act)
|
||||
- Provides extensive test coverage for LLM security vulnerabilities
|
||||
- Supports custom configuration for specific testing needs
|
||||
|
||||
## Specific Patterns in LLM Security Testing
|
||||
|
||||
The LLM provider implements security testing through integration with promptfoo, following these patterns:
|
||||
|
||||
### Red Team Testing Framework
|
||||
|
||||
- **Plugin-based Architecture**: Uses promptfoo plugins for different security test categories
|
||||
- **Comprehensive Coverage**: Includes OWASP LLM Top 10, MITRE ATLAS, NIST AI Risk Management, and EU AI Act compliance
|
||||
- **Real-Time Evaluation**: Streams test results as they are generated
|
||||
- **Progress Tracking**: Provides detailed progress information during test execution
|
||||
|
||||
### Test Execution Flow
|
||||
|
||||
1. **Configuration Loading**: Loads promptfoo configuration with target models and test plugins
|
||||
2. **Test Generation**: Generates security test cases based on configured plugins
|
||||
3. **Concurrent Execution**: Runs tests with configurable concurrency limits
|
||||
4. **Result Processing**: Converts promptfoo results to Prowler security reports
|
||||
5. **Progress Monitoring**: Tracks and displays test execution progress
|
||||
|
||||
### Security Test Categories
|
||||
|
||||
The LLM provider supports comprehensive security testing across multiple frameworks:
|
||||
|
||||
- **OWASP LLM Top 10**: Covers prompt injection, data leakage, and model security
|
||||
- **MITRE ATLAS**: Adversarial threat landscape for AI systems
|
||||
- **NIST AI Risk Management**: AI system risk assessment and mitigation
|
||||
- **EU AI Act**: European Union AI regulation compliance
|
||||
- **Custom Tests**: Support for organization-specific security requirements
|
||||
|
||||
## Error Handling and Validation
|
||||
|
||||
The LLM provider includes comprehensive error handling for:
|
||||
|
||||
- **Configuration Validation**: Ensures valid promptfoo configuration files
|
||||
- **Model Access**: Handles authentication and access issues with target LLMs
|
||||
- **Test Execution**: Manages test failures and timeout scenarios
|
||||
- **Result Processing**: Handles malformed or incomplete test results
|
||||
|
||||
## Integration with Prowler Ecosystem
|
||||
|
||||
The LLM provider seamlessly integrates with Prowler's existing infrastructure:
|
||||
|
||||
- **Output Formats**: Supports all Prowler output formats (JSON, CSV, HTML, etc.)
|
||||
- **Compliance Frameworks**: Integrates with Prowler's compliance reporting
|
||||
- **Fixer Integration**: Supports automated remediation recommendations
|
||||
- **Dashboard Integration**: Compatible with Prowler App for centralized management
|
||||
@@ -101,6 +101,7 @@ Prowler supports multiple output formats, allowing users to tailor findings pres
|
||||
finding_dict["DESCRIPTION"] = finding.metadata.Description
|
||||
finding_dict["RISK"] = finding.metadata.Risk
|
||||
finding_dict["RELATED_URL"] = finding.metadata.RelatedUrl
|
||||
finding_dict["ADDITIONAL_URLS"] = unroll_list(finding.metadata.AdditionalURLs)
|
||||
finding_dict["REMEDIATION_RECOMMENDATION_TEXT"] = (
|
||||
finding.metadata.Remediation.Recommendation.Text
|
||||
)
|
||||
|
||||
@@ -0,0 +1,210 @@
|
||||
# Renaming Checks in Prowler
|
||||
|
||||
To rename a check in Prowler, follow these steps when aligning with Check ID structure, fixing typos, or updating check logic that requires a new name.
|
||||
|
||||
When changing a Check ID, update the following files:
|
||||
|
||||
## Update Check Folder Structure
|
||||
|
||||
First, rename the check folder with the new check name.
|
||||
|
||||
**Path:** `prowler/providers/<provider>/services/<service>/<check_name>`
|
||||
|
||||
**Example:**
|
||||
```
|
||||
# Before
|
||||
prowler/providers/aws/services/inspector2/inspector2_findings_exist/
|
||||
|
||||
# After
|
||||
prowler/providers/aws/services/inspector2/inspector2_active_findings_exist/
|
||||
```
|
||||
|
||||
Next, rename the file that contains the check logic. Inside that file, also rename the class name to match the new check name.
|
||||
|
||||
**Path:** `prowler/providers/<provider>/services/<service>/<check_name>/<check_name>.py`
|
||||
|
||||
**Example:**
|
||||
```python
|
||||
# Before
|
||||
class inspector2_findings_exist(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
# ... check logic ...
|
||||
|
||||
# After
|
||||
class inspector2_active_findings_exist(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
# ... check logic ...
|
||||
```
|
||||
|
||||
Then, rename the file that contains the check metadata. Inside that file, add the old check name as an alias in the `CheckAliases` field and modify the `CheckID` to the new check name.
|
||||
|
||||
**Path:** `prowler/providers/<provider>/services/<service>/<check_name>/<check_name>.metadata.json`
|
||||
|
||||
**Example:**
|
||||
```json
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "inspector2_active_findings_exist",
|
||||
"CheckTitle": "Check if Inspector2 active findings exist",
|
||||
"CheckAliases": [
|
||||
"inspector2_findings_exist"
|
||||
],
|
||||
"CheckType": [],
|
||||
"ServiceName": "inspector2",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:aws:inspector2:region:account-id/detector-id",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Other",
|
||||
"Description": "This check determines if there are any active findings in your AWS account that have been detected by AWS Inspector2.",
|
||||
"Risk": "Without using AWS Inspector, you may not be aware of all the security vulnerabilities in your AWS resources.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/inspector/latest/user/findings-understanding.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Inspector/amazon-inspector-findings.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Review the active findings from Inspector2",
|
||||
"Url": "https://docs.aws.amazon.com/inspector/latest/user/what-is-inspector.html"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
```
|
||||
|
||||
## Update Test Files
|
||||
|
||||
Second, rename the tests folder with the new check name.
|
||||
|
||||
**Path:** `tests/providers/<provider>/services/<service>/<check_name>`
|
||||
|
||||
**Example:**
|
||||
```
|
||||
# Before
|
||||
tests/providers/aws/services/inspector2/inspector2_findings_exist/
|
||||
|
||||
# After
|
||||
tests/providers/aws/services/inspector2/inspector2_active_findings_exist/
|
||||
```
|
||||
|
||||
Next, rename the test file that contains all the unit tests. Inside that file, rename all occurrences of the old check name to the new check name.
|
||||
|
||||
**Path:** `tests/providers/<provider>/services/<service>/<check_name>/<check_name>_test.py`
|
||||
|
||||
**Example:**
|
||||
```python
|
||||
# Before
|
||||
from prowler.providers.aws.services.inspector2.inspector2_findings_exist.inspector2_findings_exist import (
|
||||
inspector2_findings_exist,
|
||||
)
|
||||
|
||||
class Test_inspector2_findings_exist:
|
||||
def test_inspector2_no_findings(self):
|
||||
# ... test logic ...
|
||||
|
||||
def test_inspector2_with_findings(self):
|
||||
# ... test logic ...
|
||||
|
||||
# After
|
||||
from prowler.providers.aws.services.inspector2.inspector2_active_findings_exist.inspector2_active_findings_exist import (
|
||||
inspector2_active_findings_exist,
|
||||
)
|
||||
|
||||
class Test_inspector2_active_findings_exist:
|
||||
def test_inspector2_no_findings(self):
|
||||
# ... test logic ...
|
||||
|
||||
def test_inspector2_with_findings(self):
|
||||
# ... test logic ...
|
||||
```
|
||||
|
||||
**Important:** Update all references to the old check name in the test file, including:
|
||||
|
||||
- Import statements at the top of the file
|
||||
- Class name in the test class
|
||||
- Any function calls to the check
|
||||
- Any string references to the check name
|
||||
- Mock patches that reference the check
|
||||
|
||||
**Complete example of all changes needed in test files:**
|
||||
```python
|
||||
# Before
|
||||
from prowler.providers.aws.services.inspector2.inspector2_findings_exist.inspector2_findings_exist import (
|
||||
inspector2_findings_exist,
|
||||
)
|
||||
|
||||
class Test_inspector2_findings_exist:
|
||||
def test_inspector2_no_findings(self):
|
||||
# Mock setup
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.services.inspector2.inspector2_findings_exist.inspector2_findings_exist.inspector2_client",
|
||||
inspector2_client,
|
||||
):
|
||||
check = inspector2_findings_exist()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert "No active findings found" in result[0].status_extended
|
||||
|
||||
# After
|
||||
from prowler.providers.aws.services.inspector2.inspector2_active_findings_exist.inspector2_active_findings_exist import (
|
||||
inspector2_active_findings_exist,
|
||||
)
|
||||
|
||||
class Test_inspector2_active_findings_exist:
|
||||
def test_inspector2_no_findings(self):
|
||||
# Mock setup
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.services.inspector2.inspector2_active_findings_exist.inspector2_active_findings_exist.inspector2_client",
|
||||
inspector2_client,
|
||||
):
|
||||
check = inspector2_active_findings_exist()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert "No active findings found" in result[0].status_extended
|
||||
```
|
||||
|
||||
## Update Compliance Mappings
|
||||
|
||||
Finally, rename all occurrences of the old check name to the new check name inside any compliance framework where the check is mapped.
|
||||
|
||||
- `prowler/compliance/<service>/<compliance_where_the_check_is_mapped>.json`
|
||||
|
||||
**Example:**
|
||||
```json
|
||||
{
|
||||
"Framework": "CIS",
|
||||
"Version": "2.0",
|
||||
"Provider": "AWS",
|
||||
"Description": "The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services.",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "4.1",
|
||||
"Description": "Ensure a log metric filter and alarm exist for unauthorized API calls",
|
||||
"Checks": [
|
||||
"inspector2_active_findings_exist"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "4 Logging and Monitoring",
|
||||
"Profile": "Level 1",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "Real-time monitoring of API calls can be achieved by directing CloudTrail Logs to CloudWatch Logs and establishing corresponding metric filters and alarms."
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
The development compliance file may contain examples of the check being renamed. If so, modify this file as well:
|
||||
|
||||
- `api/src/backend/api/fixtures/dev/7_dev_compliance.json`
|
||||
@@ -14,6 +14,7 @@ The official supported providers right now are:
|
||||
| **Github** | Official | Stable | UI, API, CLI |
|
||||
| **IaC** | Official | Beta | CLI |
|
||||
| **MongoDB Atlas** | Official | Beta | CLI |
|
||||
| **LLM** | Official | Beta | CLI |
|
||||
| **NHN** | Unofficial | Beta | CLI |
|
||||
|
||||
Prowler supports **auditing, incident response, continuous monitoring, hardening, forensic readiness, and remediation**.
|
||||
|
||||
@@ -4,6 +4,9 @@ Prowler App supports multiple installation methods based on your environment.
|
||||
|
||||
Refer to the [Prowler App Tutorial](../tutorials/prowler-app.md) for detailed usage instructions.
|
||||
|
||||
???+ warning
|
||||
Prowler configuration is based on `.env` files. Each version of Prowler may have differences in that file, so please use the file that corresponds to that version, repository branch, or tag.
|
||||
|
||||
=== "Docker Compose"
|
||||
|
||||
_Requirements_:
|
||||
@@ -25,6 +28,9 @@ Refer to the [Prowler App Tutorial](../tutorials/prowler-app.md) for detailed us
|
||||
???+ note
|
||||
You can change the environment variables in the `.env` file. Note that it is not recommended to use the default values in production environments.
|
||||
|
||||
???+ note
|
||||
For a secure setup, leave empty or remove `DJANGO_TOKEN_SIGNING_KEY` and `DJANGO_TOKEN_VERIFYING_KEY` in `.env` before first start. When absent, the API auto‑generates a unique key pair and stores it in `~/.config/prowler-api` (non-container) or the bound Docker volume in `_data/api` (container). Never commit or reuse static/default keys. To rotate, delete the stored key files and restart the API.
|
||||
|
||||
???+ note
|
||||
A development mode is also available: use the file https://github.com/prowler-cloud/prowler/blob/master/docker-compose-dev.yml to run the app in development mode.
|
||||
|
||||
|
||||
@@ -2,7 +2,11 @@
|
||||
|
||||
Prowler requires AWS credentials to function properly. Authentication is available through the following methods:
|
||||
|
||||
- Static Credentials
|
||||
- Assumed Role
|
||||
|
||||
## Required Permissions
|
||||
|
||||
To ensure full functionality, attach the following AWS managed policies to the designated user or role:
|
||||
|
||||
- `arn:aws:iam::aws:policy/SecurityAudit`
|
||||
@@ -13,37 +17,114 @@ To ensure full functionality, attach the following AWS managed policies to the d
|
||||
For certain checks, additional read-only permissions are required. Attach the following custom policy to your role: [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json)
|
||||
|
||||
|
||||
## Configure AWS Credentials
|
||||
## Assume Role (Recommended)
|
||||
|
||||
Use one of the following methods to authenticate:
|
||||
This method grants permanent access and is the recommended setup for production environments.
|
||||
|
||||
```console
|
||||
aws configure
|
||||
```
|
||||
=== "CloudFormation"
|
||||
|
||||
or
|
||||
1. Download the [Prowler Scan Role Template](https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/permissions/templates/cloudformation/prowler-scan-role.yml)
|
||||
|
||||
```console
|
||||
export AWS_ACCESS_KEY_ID="ASXXXXXXX"
|
||||
export AWS_SECRET_ACCESS_KEY="XXXXXXXXX"
|
||||
export AWS_SESSION_TOKEN="XXXXXXXXX"
|
||||
```
|
||||

|
||||
|
||||
These credentials must be associated with a user or role with the necessary permissions to perform security checks.
|
||||

|
||||
|
||||
2. Open the [AWS Console](https://console.aws.amazon.com), search for **CloudFormation**
|
||||
|
||||

|
||||
|
||||
## AWS Profiles
|
||||
3. Go to **Stacks** and click "Create stack" > "With new resources (standard)"
|
||||
|
||||
Specify a custom AWS profile using the following command:
|
||||

|
||||
|
||||
```console
|
||||
prowler aws -p/--profile <profile_name>
|
||||
```
|
||||
4. In **Specify Template**, choose "Upload a template file" and select the downloaded file
|
||||
|
||||
## Multi-Factor Authentication (MFA)
|
||||

|
||||

|
||||
|
||||
For IAM entities requiring Multi-Factor Authentication (MFA), use the `--mfa` flag. Prowler prompts for the following values to initiate a new session:
|
||||
5. Click "Next", provide a stack name and the **External ID** shown in the Prowler Cloud setup screen
|
||||
|
||||
- **ARN of your MFA device**
|
||||
- **TOTP (Time-Based One-Time Password)**
|
||||

|
||||

|
||||
|
||||
!!! info
|
||||
An **External ID** is required when assuming the *ProwlerScan* role to comply with AWS [confused deputy prevention](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html).
|
||||
|
||||
6. Acknowledge the IAM resource creation warning and proceed
|
||||
|
||||

|
||||
|
||||
7. Click "Submit" to deploy the stack
|
||||
|
||||

|
||||
|
||||
=== "Terraform"
|
||||
|
||||
To provision the scan role using Terraform:
|
||||
|
||||
1. Run the following commands:
|
||||
|
||||
```bash
|
||||
terraform init
|
||||
terraform plan
|
||||
terraform apply
|
||||
```
|
||||
|
||||
2. During `plan` and `apply`, provide the **External ID** when prompted, which is available in the Prowler Cloud or Prowler App UI:
|
||||
|
||||

|
||||
|
||||
> 💡 Note: Terraform will use the AWS credentials of the default profile.
|
||||
|
||||
---
|
||||
|
||||
## Credentials
|
||||
=== "Long term credentials"
|
||||
|
||||
1. Go to the [AWS Console](https://console.aws.amazon.com), open **CloudShell**
|
||||
|
||||

|
||||
|
||||
2. Run:
|
||||
|
||||
```bash
|
||||
aws iam create-access-key
|
||||
```
|
||||
|
||||
3. Copy the output containing:
|
||||
|
||||
- `AccessKeyId`
|
||||
- `SecretAccessKey`
|
||||
|
||||

|
||||
|
||||
=== "Short term credentials (Recommended)"
|
||||
|
||||
Use the [AWS Access Portal](https://docs.aws.amazon.com/singlesignon/latest/userguide/howtogetcredentials.html) or the CLI:
|
||||
|
||||
1. Retrieve short-term credentials for the IAM identity using this command:
|
||||
|
||||
```bash
|
||||
aws sts get-session-token --duration-seconds 900
|
||||
```
|
||||
|
||||
???+ note
|
||||
Check the AWS documentation [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/sts_example_sts_GetSessionToken_section.html)
|
||||
|
||||
2. Copy the output containing:
|
||||
|
||||
- `AccessKeyId`
|
||||
- `SecretAccessKey`
|
||||
- `SessionToken`
|
||||
|
||||
> Sample output:
|
||||
```json
|
||||
{
|
||||
"Credentials": {
|
||||
"AccessKeyId": "ASIAIOSFODNN7EXAMPLE",
|
||||
"SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY",
|
||||
"SessionToken": "AQoEXAMPLEH4aoAH0gNCAPyJxz4BlCFFxWNE1OPTgk5TthT+FvwqnKwRcOIfrRh3c/LTo6UDdyJwOOvEVPvLXCrrrUtdnniCEXAMPLE/IvU1dYUg2RVAJBanLiHb4IgRmpRV3zrkuWJOgQs8IZZaIv2BXIa2R4OlgkBN9bkUDNCJiBeb/AXlzBBko7b15fjrBs2+cTQtpZ3CYWFXG8C5zqx37wnOE49mRl/+OtkIKGO7fAE",
|
||||
"Expiration": "2020-05-19T18:06:10+00:00"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,39 +1,31 @@
|
||||
# Getting Started with AWS on Prowler Cloud/App
|
||||
# Getting Started With AWS on Prowler
|
||||
|
||||
## Prowler App
|
||||
|
||||
<iframe width="560" height="380" src="https://www.youtube-nocookie.com/embed/RPgIWOCERzY" title="Prowler Cloud Onboarding AWS" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen="1"></iframe>
|
||||
|
||||
Set up your AWS account to enable security scanning using Prowler Cloud/App.
|
||||
> Walkthrough video onboarding an AWS Account using Assumed Role.
|
||||
|
||||
## Requirements
|
||||
|
||||
To configure your AWS account, you’ll need:
|
||||
|
||||
1. Access to Prowler Cloud/App
|
||||
2. Properly configured AWS credentials (either static or via an assumed IAM role)
|
||||
|
||||
---
|
||||
|
||||
## Step 1: Get Your AWS Account ID
|
||||
### Step 1: Get Your AWS Account ID
|
||||
|
||||
1. Log in to the [AWS Console](https://console.aws.amazon.com)
|
||||
2. Locate your AWS account ID in the top-right dropdown menu
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Step 2: Access Prowler Cloud/App
|
||||
### Step 2: Access Prowler Cloud or Prowler App
|
||||
|
||||
1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](../prowler-app.md)
|
||||
2. Go to `Configuration` > `Cloud Providers`
|
||||
2. Go to "Configuration" > "Cloud Providers"
|
||||
|
||||

|
||||
|
||||
3. Click `Add Cloud Provider`
|
||||
3. Click "Add Cloud Provider"
|
||||
|
||||

|
||||
|
||||
4. Select `Amazon Web Services`
|
||||
4. Select "Amazon Web Services"
|
||||
|
||||

|
||||
|
||||
@@ -41,96 +33,39 @@ To configure your AWS account, you’ll need:
|
||||
|
||||

|
||||
|
||||
6. Choose your preferred authentication method (next step)
|
||||
6. Choose the preferred authentication method (next step)
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Step 3: Set Up AWS Authentication
|
||||
### Step 3: Set Up AWS Authentication
|
||||
|
||||
Before proceeding, choose your preferred authentication mode:
|
||||
Before proceeding, choose the preferred authentication mode:
|
||||
|
||||
Credentials
|
||||
**Credentials**
|
||||
|
||||
* Quick scan as current user ✅
|
||||
* No extra setup ✅
|
||||
* Credentials time out ❌
|
||||
* Quick scan as current user
|
||||
* No extra setup
|
||||
* Credentials time out
|
||||
|
||||
Assumed Role
|
||||
**Assumed Role**
|
||||
|
||||
* Preferred Setup
|
||||
* Permanent Credentials
|
||||
* Requires access to create role
|
||||
|
||||
* Preferred Setup ✅
|
||||
* Permanent Credentials ✅
|
||||
* Requires access to create role ❌
|
||||
|
||||
---
|
||||
|
||||
### 🔐 Assume Role (Recommended)
|
||||
|
||||

|
||||
#### Assume Role (Recommended)
|
||||
|
||||
This method grants permanent access and is the recommended setup for production environments.
|
||||
|
||||
=== "CloudFormation"
|
||||

|
||||
|
||||
1. Download the [Prowler Scan Role Template](https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/permissions/templates/cloudformation/prowler-scan-role.yml)
|
||||
For detailed instructions on how to create the role, see [Authentication > Assume Role](./authentication.md#assume-role-recommended).
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
2. Open the [AWS Console](https://console.aws.amazon.com), search for **CloudFormation**
|
||||
|
||||

|
||||
|
||||
3. Go to **Stacks** and click `Create stack` > `With new resources (standard)`
|
||||
|
||||

|
||||
|
||||
4. In **Specify Template**, choose `Upload a template file` and select the downloaded file
|
||||
|
||||

|
||||

|
||||
|
||||
5. Click `Next`, provide a stack name and the **External ID** shown in the Prowler Cloud setup screen
|
||||
|
||||

|
||||

|
||||
|
||||
!!! info
|
||||
An **External ID** is required when assuming the *ProwlerScan* role to comply with AWS [confused deputy prevention](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html).
|
||||
|
||||
6. Acknowledge the IAM resource creation warning and proceed
|
||||
|
||||

|
||||
|
||||
7. Click `Submit` to deploy the stack
|
||||
|
||||

|
||||
|
||||
=== "Terraform"
|
||||
|
||||
To provision the scan role using Terraform:
|
||||
|
||||
1. Run the following commands:
|
||||
|
||||
```bash
|
||||
terraform init
|
||||
terraform plan
|
||||
terraform apply
|
||||
```
|
||||
|
||||
2. During `plan` and `apply`, you will be prompted for the **External ID**, which is available in the Prowler Cloud/App UI:
|
||||
|
||||

|
||||
|
||||
> 💡 Note: Terraform will use the AWS credentials of your default profile.
|
||||
|
||||
---
|
||||
|
||||
### Finish Setup with Assume Role
|
||||
|
||||
8. Once the role is created, go to the **IAM Console**, click on the `ProwlerScan` role to open its details:
|
||||
8. Once the role is created, go to the **IAM Console**, click on the "ProwlerScan" role to open its details:
|
||||
|
||||

|
||||
|
||||
@@ -138,80 +73,69 @@ This method grants permanent access and is the recommended setup for production
|
||||
|
||||

|
||||
|
||||
10. Paste the ARN into the corresponding field in Prowler Cloud/App
|
||||
10. Paste the ARN into the corresponding field in Prowler Cloud or Prowler App
|
||||
|
||||

|
||||
|
||||
11. Click `Next`, then `Launch Scan`
|
||||
11. Click "Next", then "Launch Scan"
|
||||
|
||||

|
||||

|
||||
|
||||
---
|
||||
|
||||
### 🔑 Credentials (Static Access Keys)
|
||||
#### Credentials (Static Access Keys)
|
||||
|
||||
You can also configure your AWS account using static credentials (not recommended for long-term use):
|
||||
AWS accounts can also be configured using static credentials (not recommended for long-term use):
|
||||
|
||||

|
||||
|
||||
=== "Long term credentials"
|
||||
For detailed instructions on how to create the credentials, see [Authentication > Credentials](./authentication.md#credentials).
|
||||
|
||||
1. Go to the [AWS Console](https://console.aws.amazon.com), open **CloudShell**
|
||||
1. Complete the form in Prowler Cloud or Prowler App and click "Next"
|
||||
|
||||

|
||||

|
||||
|
||||
2. Run:
|
||||
2. Click "Launch Scan"
|
||||
|
||||
```bash
|
||||
aws iam create-access-key
|
||||
```
|
||||

|
||||
|
||||
3. Copy the output containing:
|
||||
---
|
||||
|
||||
- `AccessKeyId`
|
||||
- `SecretAccessKey`
|
||||
## Prowler CLI
|
||||
|
||||

|
||||
### Configure AWS Credentials
|
||||
|
||||
> ⚠️ Save these credentials securely and paste them into the Prowler Cloud/App setup screen.
|
||||
To authenticate with AWS, use one of the following methods:
|
||||
|
||||
=== "Short term credentials (Recommended)"
|
||||
```console
|
||||
aws configure
|
||||
```
|
||||
|
||||
You can use your [AWS Access Portal](https://docs.aws.amazon.com/singlesignon/latest/userguide/howtogetcredentials.html) or the CLI:
|
||||
or
|
||||
|
||||
1. Retrieve short-term credentials for the IAM identity using this command:
|
||||
```console
|
||||
export AWS_ACCESS_KEY_ID="ASXXXXXXX"
|
||||
export AWS_SECRET_ACCESS_KEY="XXXXXXXXX"
|
||||
export AWS_SESSION_TOKEN="XXXXXXXXX"
|
||||
```
|
||||
|
||||
```bash
|
||||
aws sts get-session-token --duration-seconds 900
|
||||
```
|
||||
These credentials must be associated with a user or role with the necessary permissions to perform security checks.
|
||||
|
||||
???+ note
|
||||
Check the AWS documentation [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/sts_example_sts_GetSessionToken_section.html)
|
||||
More details on Assume Role settings from the CLI in [Assume Role](./role-assumption.md) page.
|
||||
|
||||
2. Copy the output containing:
|
||||
|
||||
- `AccessKeyId`
|
||||
- `SecretAccessKey`
|
||||
### AWS Profiles
|
||||
|
||||
> Sample output:
|
||||
```json
|
||||
{
|
||||
"Credentials": {
|
||||
"AccessKeyId": "ASIAIOSFODNN7EXAMPLE",
|
||||
"SecretAccessKey": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY",
|
||||
"SessionToken": "AQoEXAMPLEH4aoAH0gNCAPyJxz4BlCFFxWNE1OPTgk5TthT+FvwqnKwRcOIfrRh3c/LTo6UDdyJwOOvEVPvLXCrrrUtdnniCEXAMPLE/IvU1dYUg2RVAJBanLiHb4IgRmpRV3zrkuWJOgQs8IZZaIv2BXIa2R4OlgkBN9bkUDNCJiBeb/AXlzBBko7b15fjrBs2+cTQtpZ3CYWFXG8C5zqx37wnOE49mRl/+OtkIKGO7fAE",
|
||||
"Expiration": "2020-05-19T18:06:10+00:00"
|
||||
}
|
||||
}
|
||||
```
|
||||
To use a custom AWS profile, specify it with the following command:
|
||||
|
||||
> ⚠️ Save these credentials securely and paste them into the Prowler Cloud/App setup screen.
|
||||
```console
|
||||
prowler aws -p/--profile <profile_name>
|
||||
```
|
||||
|
||||
Complete the form in Prowler Cloud/App and click `Next`
|
||||
### Multi-Factor Authentication (MFA)
|
||||
|
||||

|
||||
For IAM entities requiring Multi-Factor Authentication (MFA), use the `--mfa` flag. Prowler prompts for the following values to initiate a new session:
|
||||
|
||||
Click `Launch Scan`
|
||||
|
||||

|
||||
- **ARN of your MFA device**
|
||||
- **TOTP (time-based one-time password)**
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# AWS Assume Role in Prowler
|
||||
# AWS Assume Role in Prowler (CLI)
|
||||
|
||||
## Authentication Overview
|
||||
|
||||
|
||||
@@ -1,70 +1,229 @@
|
||||
# Azure Authentication in Prowler
|
||||
|
||||
Prowler for Azure supports multiple authentication types. To use a specific method, pass the appropriate flag during execution:
|
||||
Prowler for Azure supports multiple authentication types. Authentication methods vary between Prowler App and Prowler CLI:
|
||||
|
||||
- [**Service Principal Application**](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser#service-principal-object) (**Recommended**)
|
||||
- Existing **AZ CLI credentials**
|
||||
- **Interactive browser authentication**
|
||||
- [**Managed Identity**](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/overview) authentication
|
||||
**Prowler App:**
|
||||
|
||||
> ⚠️ **Important:** For Prowler App, only Service Principal authentication is supported.
|
||||
- [**Service Principal Application**](#service-principal-application-authentication-recommended)
|
||||
|
||||
### Service Principal Application Authentication
|
||||
**Prowler CLI:**
|
||||
|
||||
Enable Prowler authentication using a Service Principal Application by setting up the following environment variables:
|
||||
- [**Service Principal Application**](#service-principal-application-authentication-recommended) (**Recommended**)
|
||||
- [**AZ CLI credentials**](#az-cli-authentication)
|
||||
- [**Interactive browser authentication**](#browser-authentication)
|
||||
- [**Managed Identity Authentication**](#managed-identity-authentication)
|
||||
|
||||
```console
|
||||
export AZURE_CLIENT_ID="XXXXXXXXX"
|
||||
export AZURE_TENANT_ID="XXXXXXXXX"
|
||||
export AZURE_CLIENT_SECRET="XXXXXXX"
|
||||
```
|
||||
|
||||
Execution with the `--sp-env-auth` flag fails if these variables are not set or exported.
|
||||
|
||||
Refer to the [Create Prowler Service Principal](create-prowler-service-principal.md) guide for detailed setup instructions.
|
||||
|
||||
### Azure Authentication Methods
|
||||
|
||||
Prowler for Azure supports the following authentication methods:
|
||||
|
||||
- **AZ CLI Authentication (`--az-cli-auth`)** – Automated authentication using stored AZ CLI credentials.
|
||||
- **Managed Identity Authentication (`--managed-identity-auth`)** – Automated authentication via Azure Managed Identity.
|
||||
- **Browser Authentication (`--browser-auth`)** – Requires the user to authenticate using the default browser. The `tenant-id` parameter is mandatory for this method.
|
||||
|
||||
### Required Permissions
|
||||
## Required Permissions
|
||||
|
||||
Prowler for Azure requires two types of permission scopes:
|
||||
|
||||
#### Microsoft Entra ID Permissions
|
||||
### Microsoft Entra ID Permissions
|
||||
|
||||
These permissions allow Prowler to retrieve metadata from the assumed identity and perform specific Entra checks. While not mandatory for execution, they enhance functionality.
|
||||
|
||||
Required permissions:
|
||||
#### Assigning Required API Permissions
|
||||
|
||||
Assign the following Microsoft Graph permissions:
|
||||
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All` (used for Entra multifactor authentication checks)
|
||||
- `UserAuthenticationMethod.Read.All` (optional, for multifactor authentication (MFA) checks)
|
||||
|
||||
???+ note
|
||||
Replace `Directory.Read.All` with `Domain.Read.All` for more restrictive permissions. Note that Entra checks related to DirectoryRoles and GetUsers will not run with this permission.
|
||||
???+ note
|
||||
Replace `Directory.Read.All` with `Domain.Read.All` for more restrictive permissions. Note that Entra checks related to DirectoryRoles and GetUsers will not run with this permission.
|
||||
|
||||
=== "Azure Portal"
|
||||
|
||||
1. Go to your App Registration > "API permissions"
|
||||
|
||||

|
||||
|
||||
2. Click "+ Add a permission" > "Microsoft Graph" > "Application permissions"
|
||||
|
||||

|
||||

|
||||
|
||||
3. Search and select:
|
||||
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||
|
||||

|
||||
|
||||
4. Click "Add permissions", then grant admin consent
|
||||
|
||||

|
||||
|
||||
=== "Azure CLI"
|
||||
|
||||
1. To grant permissions to a Service Principal, execute the following command in a terminal:
|
||||
|
||||
```console
|
||||
az ad app permission add --id {appId} --api 00000003-0000-0000-c000-000000000000 --api-permissions 7ab1d382-f21e-4acd-a863-ba3e13f7da61=Role 246dd0d5-5bd0-4def-940b-0421030a5b68=Role 38d9df27-64da-44fd-b7c5-a6fbac20248f=Role
|
||||
```
|
||||
|
||||
2. Once the permissions are assigned, admin consent is required to finalize the changes. An administrator should run:
|
||||
|
||||
```console
|
||||
az ad app permission admin-consent --id {appId}
|
||||
```
|
||||
|
||||
|
||||
#### Subscription Scope Permissions
|
||||
### Subscription Scope Permissions
|
||||
|
||||
These permissions are required to perform security checks against Azure resources. The following **RBAC roles** must be assigned per subscription to the entity used by Prowler:
|
||||
|
||||
- `Reader` – Grants read-only access to Azure resources.
|
||||
- `ProwlerRole` – A custom role with minimal permissions, defined in the [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json).
|
||||
- `ProwlerRole` – A custom role with minimal permissions needed for some specific checks, defined in the [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json).
|
||||
|
||||
???+ note
|
||||
The `assignableScopes` field in the JSON custom role file must be updated to reflect the correct subscription or management group. Use one of the following formats: `/subscriptions/<subscription-id>` or `/providers/Microsoft.Management/managementGroups/<management-group-id>`.
|
||||
|
||||
### Assigning Permissions
|
||||
#### Assigning "Reader" Role at the Subscription Level
|
||||
By default, Prowler scans all accessible subscriptions. If you need to audit specific subscriptions, you must assign the `Reader` role to each one. For streamlined and less repetitive role assignments in multi-subscription environments, refer to the [following section](subscriptions.md#recommendation-for-managing-multiple-subscriptions).
|
||||
|
||||
To properly configure permissions, follow these guides:
|
||||
=== "Azure Portal"
|
||||
|
||||
1. To grant Prowler access to scan a specific Azure subscription, follow these steps in Azure Portal:
|
||||
Navigate to the subscription you want to audit with Prowler.
|
||||
|
||||
1. In the left menu, select “Access control (IAM)”.
|
||||
|
||||
2. Click “+ Add” and select “Add role assignment”.
|
||||
|
||||
3. In the search bar, enter `Reader`, select it and click “Next”.
|
||||
|
||||
4. In the “Members” tab, click “+ Select members”, then add the accounts to assign this role.
|
||||
|
||||
5. Click “Review + assign” to finalize and apply the role assignment.
|
||||
|
||||

|
||||
|
||||
=== "Azure CLI"
|
||||
|
||||
1. Open a terminal and execute the following command to assign the `Reader` role to the identity that is going to be assumed by Prowler:
|
||||
|
||||
```console
|
||||
az role assignment create --role "Reader" --assignee <user, group, or service principal> --scope /subscriptions/<subscription-id>
|
||||
```
|
||||
|
||||
    2. If the command is executed successfully, the output will be similar to the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"condition": null,
|
||||
"conditionVersion": null,
|
||||
"createdBy": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"createdOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00",
|
||||
"delegatedManagedIdentityResourceId": null,
|
||||
"description": null,
|
||||
"id": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/providers/Microsoft.Authorization/roleAssignments/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"name": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"principalId": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"principalName": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"principalType": "ServicePrincipal",
|
||||
"roleDefinitionId": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/providers/Microsoft.Authorization/roleDefinitions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"roleDefinitionName": "Reader",
|
||||
"scope": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"type": "Microsoft.Authorization/roleAssignments",
|
||||
"updatedBy": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"updatedOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00"
|
||||
}
|
||||
```
|
||||
|
||||
#### Assigning "ProwlerRole" Permissions at the Subscription Level
|
||||
|
||||
Some read-only permissions required for specific security checks are not included in the built-in Reader role. To support these checks, Prowler utilizes a custom role, defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json). Once created, this role can be assigned following the same process as the `Reader` role.
|
||||
|
||||
The checks requiring this `ProwlerRole` can be found in this [section](../../tutorials/azure/authentication.md#checks-requiring-prowlerrole).
|
||||
|
||||
=== "Azure Portal"
|
||||
|
||||
1. Download the [Prowler Azure Custom Role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json)
|
||||
|
||||

|
||||
|
||||
2. Modify `assignableScopes` to match your Subscription ID (e.g. `/subscriptions/xxxx-xxxx-xxxx-xxxx`)
|
||||
|
||||
3. Go to your Azure Subscription > "Access control (IAM)"
|
||||
|
||||

|
||||
|
||||
4. Click "+ Add" > "Add custom role", choose "Start from JSON" and upload the modified file
|
||||
|
||||

|
||||
|
||||
5. Click "Review + Create" to finish
|
||||
|
||||

|
||||
|
||||
6. Return to "Access control (IAM)" > "+ Add" > "Add role assignment"
|
||||
|
||||
- Assign the `Reader` role to the Application created in the previous step
|
||||
- Then repeat the same process assigning the custom `ProwlerRole`
|
||||
|
||||

|
||||
|
||||
???+ note
|
||||
The `assignableScopes` field in the JSON custom role file must be updated to reflect the correct subscription or management group. Use one of the following formats: `/subscriptions/<subscription-id>` or `/providers/Microsoft.Management/managementGroups/<management-group-id>`.
|
||||
|
||||
=== "Azure CLI"
|
||||
|
||||
1. To create a new custom role, open a terminal and execute the following command:
|
||||
|
||||
```console
|
||||
az role definition create --role-definition '{
|
||||
"Name": "ProwlerRole",
|
||||
"IsCustom": true,
|
||||
"Description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
|
||||
"AssignableScopes": [
|
||||
"/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX" // USE YOUR SUBSCRIPTION ID
|
||||
],
|
||||
"Actions": [
|
||||
"Microsoft.Web/sites/host/listkeys/action",
|
||||
"Microsoft.Web/sites/config/list/Action"
|
||||
]
|
||||
}'
|
||||
```
|
||||
|
||||
    2. If the command is executed successfully, the output will be similar to the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"assignableScopes": [
|
||||
"/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
|
||||
],
|
||||
"createdBy": null,
|
||||
"createdOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00",
|
||||
"description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
|
||||
"id": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/providers/Microsoft.Authorization/roleDefinitions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"name": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"permissions": [
|
||||
{
|
||||
"actions": [
|
||||
"Microsoft.Web/sites/host/listkeys/action",
|
||||
"Microsoft.Web/sites/config/list/Action"
|
||||
],
|
||||
"condition": null,
|
||||
"conditionVersion": null,
|
||||
"dataActions": [],
|
||||
"notActions": [],
|
||||
"notDataActions": []
|
||||
}
|
||||
],
|
||||
"roleName": "ProwlerRole",
|
||||
"roleType": "CustomRole",
|
||||
"type": "Microsoft.Authorization/roleDefinitions",
|
||||
"updatedBy": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"updatedOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00"
|
||||
}
|
||||
```
|
||||
|
||||
### Additional Resources
|
||||
|
||||
For more detailed guidance on subscription management and permissions:
|
||||
|
||||
- [Microsoft Entra ID permissions](create-prowler-service-principal.md#assigning-proper-permissions)
|
||||
- [Azure subscription permissions](subscriptions.md)
|
||||
- [Create Prowler Service Principal](create-prowler-service-principal.md)
|
||||
|
||||
???+ warning
|
||||
Some permissions in `ProwlerRole` involve **write access**. If a `ReadOnly` lock is attached to certain resources, you may encounter errors, and findings for those checks will not be available.
|
||||
@@ -75,3 +234,56 @@ The following security checks require the `ProwlerRole` permissions for executio
|
||||
|
||||
- `app_function_access_keys_configured`
|
||||
- `app_function_ftps_deployment_disabled`
|
||||
|
||||
---
|
||||
|
||||
## Service Principal Application Authentication (Recommended)
|
||||
|
||||
This method is required for Prowler App and recommended for Prowler CLI.
|
||||
|
||||
### Creating the Service Principal
|
||||
For more information, see [Creating Prowler Service Principal](create-prowler-service-principal.md).
|
||||
|
||||
### Environment Variables (CLI)
|
||||
|
||||
For Prowler CLI, set up the following environment variables:
|
||||
|
||||
```console
|
||||
export AZURE_CLIENT_ID="XXXXXXXXX"
|
||||
export AZURE_TENANT_ID="XXXXXXXXX"
|
||||
export AZURE_CLIENT_SECRET="XXXXXXX"
|
||||
```
|
||||
|
||||
Execution with the `--sp-env-auth` flag fails if these variables are not set or exported.
|
||||
|
||||
## AZ CLI Authentication
|
||||
|
||||
*Available only for Prowler CLI*
|
||||
|
||||
Use stored Azure CLI credentials:
|
||||
|
||||
```console
|
||||
prowler azure --az-cli-auth
|
||||
```
|
||||
|
||||
## Managed Identity Authentication
|
||||
|
||||
*Available only for Prowler CLI*
|
||||
|
||||
Authenticate via Azure Managed Identity (when running on Azure resources):
|
||||
|
||||
```console
|
||||
prowler azure --managed-identity-auth
|
||||
```
|
||||
|
||||
## Browser Authentication
|
||||
|
||||
*Available only for Prowler CLI*
|
||||
|
||||
Authenticate using the default browser:
|
||||
|
||||
```console
|
||||
prowler azure --browser-auth --tenant-id <tenant-id>
|
||||
```
|
||||
|
||||
> **Note:** The `tenant-id` parameter is mandatory for browser authentication.
|
||||
|
||||
@@ -2,26 +2,39 @@
|
||||
|
||||
To enable Prowler to assume an identity for scanning with the required privileges, a Service Principal must be created. This Service Principal authenticates against Azure and retrieves necessary metadata for checks.
|
||||
|
||||
### Methods for Creating a Service Principal
|
||||
|
||||
Service Principal Applications can be created using either the Azure Portal or the Azure CLI.
|
||||
|
||||
## Creating a Service Principal via Azure Portal / Entra Admin Center
|
||||
|
||||
1. Access Microsoft Entra ID.
|
||||
2. In the left menu bar, navigate to **"App registrations"**.
|
||||
3. Click **"+ New registration"** in the menu bar to register a new application
|
||||
4. Fill the **"Name"**, select the **"Supported account types"** and click **"Register"**. You will be redirected to the applications page.
|
||||
5. In the left menu bar, select **"Certificates & secrets"**.
|
||||
6. Under the **"Certificates & secrets"** view, click **"+ New client secret"**.
|
||||
7. Fill the **"Description"** and **"Expires"** fields, then click **"Add"**.
|
||||
8. Copy the secret value, as it will be used as `AZURE_CLIENT_SECRET` environment variable.
|
||||
|
||||

|
||||
|
||||
## From Azure CLI
|
||||
## Creating a Service Principal via Azure Portal / Entra Admin Center
|
||||
|
||||
### Creating a Service Principal
|
||||
1. Access **Microsoft Entra ID** in the [Azure Portal](https://portal.azure.com)
|
||||
|
||||

|
||||
|
||||
2. Navigate to "Manage" > "App registrations"
|
||||
|
||||

|
||||
|
||||
3. Click "+ New registration", complete the form, and click "Register"
|
||||
|
||||

|
||||
|
||||
4. Go to "Certificates & secrets" > "+ New client secret"
|
||||
|
||||

|
||||

|
||||
|
||||
5. Fill in the required fields and click "Add", then copy the generated value
|
||||
|
||||
| Value | Description |
|
||||
|-------|-----------|
|
||||
| Client ID | Application ID |
|
||||
| Client Secret | Secret to Connect to the App |
|
||||
| Tenant ID | Microsoft Entra Tenant ID |
|
||||
|
||||
|
||||
## Creating a Service Principal from Azure CLI
|
||||
|
||||
To create a Service Principal using the Azure CLI, follow these steps:
|
||||
|
||||
@@ -46,55 +59,4 @@ To create a Service Principal using the Azure CLI, follow these steps:
|
||||
|
||||
## Assigning Proper Permissions
|
||||
|
||||
To allow Prowler to retrieve metadata from the assumed identity and run Entra checks, assign the following permissions:
|
||||
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All` (used only for the Entra checks related to multifactor authentication)
|
||||
|
||||
Permissions can be assigned via the Azure Portal or the Azure CLI.
|
||||
|
||||
???+ note
|
||||
After creating and assigning the necessary Entra permissions, follow this [tutorial](../azure/subscriptions.md) to add subscription permissions to the application and start scanning your resources.
|
||||
|
||||
### Assigning the Reader Role in Azure Portal
|
||||
|
||||
1. Access Microsoft Entra ID.
|
||||
|
||||
2. In the left menu bar, navigate to “App registrations”.
|
||||
|
||||
3. Select the created application.
|
||||
|
||||
4. In the left menu bar, select “API permissions”.
|
||||
|
||||
5. Click “+ Add a permission” and select “Microsoft Graph”.
|
||||
|
||||
6. In the “Microsoft Graph” view, select “Application permissions”.
|
||||
|
||||
7. Search for "Directory", "Policy" and "UserAuthenticationMethod", then select the following permissions:
|
||||
|
||||
- `Directory.Read.All`
|
||||
|
||||
- `Policy.Read.All`
|
||||
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||
|
||||
8. Click “Add permissions” to apply the new permissions.
|
||||
|
||||
9. Finally, an admin must click “Grant admin consent for \[your tenant]” to apply the permissions.
|
||||
|
||||

|
||||
|
||||
### From Azure CLI
|
||||
|
||||
1. To grant permissions to a Service Principal, execute the following command in a terminal:
|
||||
|
||||
```console
|
||||
az ad app permission add --id {appId} --api 00000003-0000-0000-c000-000000000000 --api-permissions 7ab1d382-f21e-4acd-a863-ba3e13f7da61=Role 246dd0d5-5bd0-4def-940b-0421030a5b68=Role 38d9df27-64da-44fd-b7c5-a6fbac20248f=Role
|
||||
```
|
||||
|
||||
2. Once the permissions are assigned, admin consent is required to finalize the changes. An administrator should run:
|
||||
|
||||
```console
|
||||
az ad app permission admin-consent --id {appId}
|
||||
```
|
||||
Go to [Assigning Proper Permissions](./authentication.md#required-permissions) to learn how to assign the necessary permissions to the Service Principal.
|
||||
@@ -1,31 +1,23 @@
|
||||
# Getting Started with Azure on Prowler Cloud/App
|
||||
# Getting Started With Azure on Prowler
|
||||
|
||||
## Prowler App
|
||||
|
||||
<iframe width="560" height="380" src="https://www.youtube-nocookie.com/embed/v1as8vTFlMg" title="Prowler Cloud Onboarding Azure" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen="1"></iframe>
|
||||
> Walkthrough video onboarding an Azure Subscription using Service Principal.
|
||||
|
||||
Set up your Azure subscription to enable security scanning using Prowler Cloud/App.
|
||||
|
||||
???+ note "Government Cloud Support"
|
||||
Government cloud subscriptions (Azure Government) are not currently supported, but we expect to add support for them in the near future.
|
||||
|
||||
## Requirements
|
||||
### Prerequisites
|
||||
|
||||
To configure your Azure subscription, you’ll need:
|
||||
Before setting up Azure in Prowler App, you need to create a Service Principal with proper permissions.
|
||||
|
||||
1. Get the `Subscription ID`
|
||||
2. Access to Prowler Cloud/App
|
||||
3. Configure authentication in Azure:
|
||||
|
||||
3.1 Create a Service Principal
|
||||
|
||||
3.2 Assign required permissions
|
||||
|
||||
3.3 Assign permissions at the subscription level
|
||||
|
||||
4. Add the credentials to Prowler Cloud/App
|
||||
For detailed instructions on how to create the Service Principal and configure permissions, see [Authentication > Service Principal](./authentication.md#service-principal-application-authentication-recommended).
|
||||
|
||||
---
|
||||
|
||||
## Step 1: Get the Subscription ID
|
||||
### Step 1: Get the Subscription ID
|
||||
|
||||
1. Go to the [Azure Portal](https://portal.azure.com/#home) and search for `Subscriptions`
|
||||
2. Locate and copy your Subscription ID
|
||||
@@ -35,9 +27,9 @@ To configure your Azure subscription, you’ll need:
|
||||
|
||||
---
|
||||
|
||||
## Step 2: Access Prowler Cloud/App
|
||||
### Step 2: Access Prowler App
|
||||
|
||||
1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](../prowler-app.md)
|
||||
1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](../prowler-app.md)
|
||||
2. Navigate to `Configuration` > `Cloud Providers`
|
||||
|
||||

|
||||
@@ -54,117 +46,19 @@ To configure your Azure subscription, you’ll need:
|
||||
|
||||

|
||||
|
||||
---
|
||||
### Step 3: Add Credentials to Prowler App
|
||||
|
||||
## Step 3: Configure the Azure Subscription
|
||||
|
||||
### Create the Service Principal
|
||||
|
||||
A Service Principal is required to grant Prowler the necessary privileges.
|
||||
|
||||
1. Access **Microsoft Entra ID**
|
||||
|
||||

|
||||
|
||||
2. Navigate to `Manage` > `App registrations`
|
||||
|
||||

|
||||
|
||||
3. Click `+ New registration`, complete the form, and click `Register`
|
||||
|
||||

|
||||
|
||||
4. Go to `Certificates & secrets` > `+ New client secret`
|
||||
|
||||

|
||||

|
||||
|
||||
5. Fill in the required fields and click `Add`, then copy the generated value
|
||||
|
||||
| Value | Description |
|
||||
|-------|-------------|
|
||||
| Client ID | Application ID |
|
||||
| Client Secret | Secret to connect to the App (used as `AZURE_CLIENT_SECRET`) |
|
||||
| Tenant ID | Microsoft Entra tenant ID |
|
||||
|
||||
---
|
||||
|
||||
### Assign Required API Permissions
|
||||
|
||||
Assign the following Microsoft Graph permissions:
|
||||
|
||||
- Directory.Read.All
|
||||
|
||||
- Policy.Read.All
|
||||
|
||||
- UserAuthenticationMethod.Read.All (optional, for MFA checks)
|
||||
|
||||
???+ note
|
||||
    You can replace `Directory.Read.All` with `Domain.Read.All`, which is a more restrictive permission, but then the Entra checks related to DirectoryRoles and GetUsers will not run.
|
||||
|
||||
1. Go to your App Registration > `API permissions`
|
||||
|
||||

|
||||
|
||||
2. Click `+ Add a permission` > `Microsoft Graph` > `Application permissions`
|
||||
|
||||

|
||||

|
||||
|
||||
3. Search and select:
|
||||
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||
|
||||

|
||||
|
||||
4. Click `Add permissions`, then grant admin consent
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
### Assign Permissions at the Subscription Level
|
||||
|
||||
1. Download the [Prowler Azure Custom Role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json)
|
||||
|
||||

|
||||
|
||||
2. Modify `assignableScopes` to match your Subscription ID (e.g. `/subscriptions/xxxx-xxxx-xxxx-xxxx`)
|
||||
|
||||
3. Go to your Azure Subscription > `Access control (IAM)`
|
||||
|
||||

|
||||
|
||||
4. Click `+ Add` > `Add custom role`, choose "Start from JSON" and upload the modified file
|
||||
|
||||

|
||||
|
||||
5. Click `Review + Create` to finish
|
||||
|
||||

|
||||
|
||||
6. Return to `Access control (IAM)` > `+ Add` > `Add role assignment`
|
||||
|
||||
- Assign the `Reader` role to the Application created in the previous step
|
||||
- Then repeat the same process assigning the custom `ProwlerRole`
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Step 4: Add Credentials to Prowler Cloud/App
|
||||
Having completed the [Service Principal setup from the Authentication guide](./authentication.md#service-principal-application-authentication-recommended):
|
||||
|
||||
1. Go to your App Registration overview and copy the `Client ID` and `Tenant ID`
|
||||
|
||||

|
||||
|
||||
2. Go to Prowler Cloud/App and paste:
|
||||
2. Go to Prowler App and paste:
|
||||
|
||||
- `Client ID`
|
||||
- `Tenant ID`
|
||||
- `AZURE_CLIENT_SECRET` from earlier
|
||||
- `Client Secret` from [earlier](./authentication.md#service-principal-application-authentication-recommended)
|
||||
|
||||

|
||||
|
||||
@@ -172,6 +66,70 @@ Assign the following Microsoft Graph permissions:
|
||||
|
||||

|
||||
|
||||
4. Click `Launch Scan`
|
||||
4. Click "Launch Scan"
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
### Configure Azure Credentials
|
||||
|
||||
To authenticate with Azure, Prowler CLI supports multiple authentication methods. Choose the method that best suits your environment.
|
||||
|
||||
For detailed authentication setup instructions, see [Authentication](./authentication.md).
|
||||
|
||||
**Service Principal (Recommended)**
|
||||
|
||||
Set up environment variables:
|
||||
|
||||
```console
|
||||
export AZURE_CLIENT_ID="XXXXXXXXX"
|
||||
export AZURE_TENANT_ID="XXXXXXXXX"
|
||||
export AZURE_CLIENT_SECRET="XXXXXXX"
|
||||
```
|
||||
|
||||
Then run:
|
||||
|
||||
```console
|
||||
prowler azure --sp-env-auth
|
||||
```
|
||||
|
||||
**Azure CLI Credentials**
|
||||
|
||||
Use stored Azure CLI credentials:
|
||||
|
||||
```console
|
||||
prowler azure --az-cli-auth
|
||||
```
|
||||
|
||||
**Browser Authentication**
|
||||
|
||||
Authenticate using your default browser:
|
||||
|
||||
```console
|
||||
prowler azure --browser-auth --tenant-id <tenant-id>
|
||||
```
|
||||
|
||||
**Managed Identity**
|
||||
|
||||
When running on Azure resources:
|
||||
|
||||
```console
|
||||
prowler azure --managed-identity-auth
|
||||
```
|
||||
|
||||
### Subscription Selection
|
||||
|
||||
To scan a specific Azure subscription:
|
||||
|
||||
```console
|
||||
prowler azure --subscription-ids <subscription-id>
|
||||
```
|
||||
|
||||
To scan multiple Azure subscriptions:
|
||||
|
||||
```console
|
||||
prowler azure --subscription-ids <subscription-id1> <subscription-id2> <subscription-id3>
|
||||
```
|
||||
|
||||
@@ -18,131 +18,7 @@ Prowler allows you to specify one or more subscriptions for scanning (up to N),
|
||||
The multi-subscription feature is available only in the CLI. In Prowler App, each scan is limited to a single subscription.
|
||||
|
||||
## Assigning Permissions for Subscription Scans
|
||||
|
||||
To perform scans, ensure that the identity assumed by Prowler has the appropriate permissions.
|
||||
|
||||
By default, Prowler scans all accessible subscriptions. If you need to audit specific subscriptions, you must assign the `Reader` role to each one. For streamlined and less repetitive role assignments in multi-subscription environments, refer to the [following section](#recommendation-for-managing-multiple-subscriptions).
|
||||
|
||||
### Assigning the Reader Role in Azure Portal
|
||||
|
||||
1. To grant Prowler access to scan a specific Azure subscription, follow these steps in Azure Portal:
|
||||
Navigate to the subscription you want to audit with Prowler.
|
||||
|
||||
2. In the left menu, select “Access control (IAM)”.
|
||||
|
||||
3. Click “+ Add” and select “Add role assignment”.
|
||||
|
||||
4. In the search bar, enter `Reader`, select it and click “Next”.
|
||||
|
||||
5. In the “Members” tab, click “+ Select members”, then add the accounts to assign this role.
|
||||
|
||||
6. Click “Review + assign” to finalize and apply the role assignment.
|
||||
|
||||

|
||||
|
||||
### From Azure CLI
|
||||
|
||||
1. Open a terminal and execute the following command to assign the `Reader` role to the identity that is going to be assumed by Prowler:
|
||||
|
||||
```console
|
||||
az role assignment create --role "Reader" --assignee <user, group, or service principal> --scope /subscriptions/<subscription-id>
|
||||
```
|
||||
|
||||
2. If the command is executed successfully, the output will be similar to the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"condition": null,
|
||||
"conditionVersion": null,
|
||||
"createdBy": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"createdOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00",
|
||||
"delegatedManagedIdentityResourceId": null,
|
||||
"description": null,
|
||||
"id": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/providers/Microsoft.Authorization/roleAssignments/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"name": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"principalId": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"principalName": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"principalType": "ServicePrincipal",
|
||||
"roleDefinitionId": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/providers/Microsoft.Authorization/roleDefinitions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"roleDefinitionName": "Reader",
|
||||
"scope": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"type": "Microsoft.Authorization/roleAssignments",
|
||||
"updatedBy": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"updatedOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00"
|
||||
}
|
||||
```
|
||||
|
||||
### Prowler Custom Role
|
||||
|
||||
Some read-only permissions required for specific security checks are not included in the built-in Reader role. To support these checks, Prowler utilizes a custom role, defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json). Once created, this role can be assigned following the same process as the `Reader` role.
|
||||
|
||||
The checks requiring this `ProwlerRole` can be found in this [section](../../tutorials/azure/authentication.md#checks-requiring-prowlerrole).
|
||||
|
||||
#### Create ProwlerRole via Azure Portal
|
||||
|
||||
1. Download the [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json) file and modify the `assignableScopes` field to match the target subscription. Example format: `/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX`.
|
||||
|
||||
2. Access your Azure subscription.
|
||||
|
||||
3. Select “Access control (IAM)”.
|
||||
|
||||
4. Click “+ Add” and select “Add custom role”.
|
||||
|
||||
5. Under “Baseline permissions”, select “Start from JSON” and upload the modified role file.
|
||||
|
||||
6. Click “Review + create” to finalize the role creation.
|
||||
|
||||
#### Create ProwlerRole via Azure CLI
|
||||
|
||||
1. To create a new custom role, open a terminal and execute the following command:
|
||||
|
||||
```console
|
||||
az role definition create --role-definition '{
|
||||
"Name": "ProwlerRole",
|
||||
"IsCustom": true,
|
||||
"Description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
|
||||
"AssignableScopes": [
|
||||
"/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX" // USE YOUR SUBSCRIPTION ID
|
||||
],
|
||||
"Actions": [
|
||||
"Microsoft.Web/sites/host/listkeys/action",
|
||||
"Microsoft.Web/sites/config/list/Action"
|
||||
]
|
||||
}'
|
||||
```
|
||||
|
||||
2. If the command is executed successfully, the output will be similar to the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"assignableScopes": [
|
||||
"/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
|
||||
],
|
||||
"createdBy": null,
|
||||
"createdOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00",
|
||||
"description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
|
||||
"id": "/subscriptions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX/providers/Microsoft.Authorization/roleDefinitions/XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"name": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"permissions": [
|
||||
{
|
||||
"actions": [
|
||||
"Microsoft.Web/sites/host/listkeys/action",
|
||||
"Microsoft.Web/sites/config/list/Action"
|
||||
],
|
||||
"condition": null,
|
||||
"conditionVersion": null,
|
||||
"dataActions": [],
|
||||
"notActions": [],
|
||||
"notDataActions": []
|
||||
}
|
||||
],
|
||||
"roleName": "ProwlerRole",
|
||||
"roleType": "CustomRole",
|
||||
"type": "Microsoft.Authorization/roleDefinitions",
|
||||
"updatedBy": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
"updatedOn": "YYYY-MM-DDTHH:MM:SS.SSSSSS+00:00"
|
||||
}
|
||||
```
|
||||
Check the [Authentication > Subscription Scope Permissions](authentication.md#subscription-scope-permissions) guide for more information on how to assign permissions for subscription scans.
|
||||
|
||||
## Recommendation for Managing Multiple Subscriptions
|
||||
|
||||
|
||||
@@ -0,0 +1,483 @@
|
||||
# Prowler ThreatScore Documentation
|
||||
|
||||
## Introduction
|
||||
|
||||
The **Prowler ThreatScore** is a comprehensive compliance scoring system that provides a unified metric for assessing your organization's security posture across compliance frameworks. It aggregates findings from individual security checks into a single, normalized score ranging from 0 to 100.
|
||||
|
||||
### Purpose
|
||||
- **Unified View**: Get a single metric representing overall compliance health
|
||||
- **Risk Prioritization**: Understand which areas pose the highest security risks
|
||||
- **Progress Tracking**: Monitor improvements in compliance posture over time
|
||||
- **Executive Reporting**: Provide clear, quantifiable security metrics to stakeholders
|
||||
|
||||
## How ThreatScore Works
|
||||
|
||||
The ThreatScore calculation considers four critical factors for each compliance requirement:
|
||||
|
||||
### 1. Pass Rate (`rate_i`)
|
||||
The percentage of security checks that passed for a specific requirement:
|
||||
```
|
||||
Pass Rate = (Number of PASS findings) / (Total findings)
|
||||
```
|
||||
|
||||
### 2. Total Findings (`total_i`)
|
||||
The total number of checks performed (both PASS and FAIL) for a requirement. This represents the amount of evidence available - more findings provide greater confidence in the assessment.
|
||||
|
||||
### 3. Weight (`weight_i`)
|
||||
A numerical value (1-1000) representing the business importance or criticality of the requirement within your organization's context.
|
||||
|
||||
### 4. Risk Level (`risk_i`)
|
||||
A severity rating (1-5) indicating the potential impact of non-compliance with this requirement.
|
||||
|
||||
## Score Interpretation Guidelines
|
||||
|
||||
| ThreatScore | Interpretation | Recommended Actions |
|
||||
|------------------|----------------|-------------------|
|
||||
| 90-100% | Excellent | Maintain current controls, focus on continuous improvement |
|
||||
| 80-89% | Good | Address remaining gaps, prepare for compliance audits |
|
||||
| 70-79% | Acceptable | Prioritize high-risk failures, develop improvement plan |
|
||||
| 60-69% | Needs Improvement | Immediate attention required, may not pass compliance audit |
|
||||
| Below 60% | Critical | Emergency response needed, potential regulatory issues |
|
||||
|
||||
## Mathematical Formula
|
||||
|
||||
The ThreatScore uses a weighted average formula that accounts for all four factors:
|
||||
|
||||
```
|
||||
ThreatScore = (Σ(rate_i × total_i × weight_i × risk_i) / Σ(total_i × weight_i × risk_i)) × 100
|
||||
```
|
||||
|
||||
### Formula Properties
|
||||
- **Normalization**: Always produces a score between 0 and 100
|
||||
- **Evidence-weighted**: Requirements with more findings have proportionally greater influence
|
||||
- **Risk-sensitive**: Higher risk requirements impact the score more significantly
|
||||
- **Business-aligned**: Weight values allow customization based on organizational priorities
|
||||
|
||||
## Parameters Explained
|
||||
|
||||
### Weight Values (1-1000)
|
||||
|
||||
The weight parameter allows customization of ThreatScore calculation based on organizational priorities and regulatory requirements.
|
||||
|
||||
#### Weight Assignment Guidelines
|
||||
|
||||
| Weight Range | Priority Level | Use Cases |
|
||||
|--------------|----------------|-----------|
|
||||
| 1-100 | Low | Optional or nice-to-have controls |
|
||||
| 101-300 | Medium | Standard security practices |
|
||||
| 301-600 | High | Important security controls |
|
||||
| 601-850 | Critical | Regulatory compliance requirements |
|
||||
| 851-1000 | Maximum | Mission-critical security controls |
|
||||
|
||||
#### Weight Selection Strategy
|
||||
1. **Regulatory Mapping**: Assign higher weights to controls required by industry regulations
|
||||
2. **Business Impact**: Consider the potential business impact of control failures
|
||||
3. **Risk Tolerance**: Align weights with organizational risk appetite
|
||||
4. **Stakeholder Input**: Involve compliance and business teams in weight decisions
|
||||
|
||||
### Risk Levels (1-5)
|
||||
|
||||
Risk levels represent the potential security impact of non-compliance with a requirement.
|
||||
|
||||
| Risk Level | Severity | Impact Description |
|
||||
|------------|----------|-------------------|
|
||||
| 1 | Very Low | Minimal security impact, informational |
|
||||
| 2 | Low | Limited exposure, low probability of exploitation |
|
||||
| 3 | Medium | Moderate security risk, potential for limited damage |
|
||||
| 4 | High | Significant security risk, high probability of impact |
|
||||
| 5 | Critical | Severe security risk, immediate threat to organization |
|
||||
|
||||
#### Risk Level Assessment Criteria
|
||||
- **Confidentiality Impact**: Data exposure potential
|
||||
- **Integrity Impact**: Risk of unauthorized data modification
|
||||
- **Availability Impact**: Service disruption potential
|
||||
- **Compliance Impact**: Regulatory violation consequences
|
||||
- **Exploitability**: Ease of exploitation by threat actors
|
||||
|
||||
## Security Pillars and Subpillars
|
||||
|
||||
Prowler organizes security requirements into a hierarchical structure of pillars and subpillars, providing a comprehensive framework for security assessment and compliance evaluation.
|
||||
|
||||
### Security Pillars Overview
|
||||
|
||||
The ThreatScore calculation considers requirements organized within the following security pillars:
|
||||
|
||||
#### 1. IAM (Identity and Access Management)
|
||||
|
||||
**Purpose**: Controls who can access what resources and under what conditions
|
||||
|
||||
**Subpillars**:
|
||||
|
||||
- **1.1 Authentication**: Verifying user and system identities
|
||||
- **1.2 Authorization**: Controlling access to resources based on authenticated identity
|
||||
- **1.3 Privilege Escalation**: Preventing unauthorized elevation of permissions
|
||||
|
||||
#### 2. Attack Surface
|
||||
|
||||
**Purpose**: Minimizing exposure points that could be exploited by threat actors across network, storage, and application layers
|
||||
|
||||
**Subpillars**:
|
||||
|
||||
- **2.1 Network**: Network infrastructure security, segmentation, firewall rules, VPC configurations, and traffic controls
|
||||
- **2.2 Storage**: Data storage systems security, database security, file system permissions, backup security, and storage encryption
|
||||
- **2.3 Application**: Application-level controls and configurations, application security settings, code security, runtime protections
|
||||
|
||||
#### 3. Logging and Monitoring
|
||||
|
||||
**Purpose**: Ensuring comprehensive visibility and audit capabilities
|
||||
|
||||
**Subpillars**:
|
||||
|
||||
- **3.1 Logging**: Capturing security-relevant events and activities
|
||||
- **3.2 Retention**: Maintaining logs for appropriate time periods
|
||||
- **3.3 Monitoring**: Active surveillance and alerting on security events
|
||||
|
||||
#### 4. Encryption
|
||||
|
||||
**Purpose**: Protecting data confidentiality through cryptographic controls
|
||||
|
||||
**Subpillars**:
|
||||
|
||||
- **4.1 In-Transit**: Encrypting data during transmission
|
||||
- **4.2 At-Rest**: Encrypting stored data
|
||||
|
||||
### Pillar Hierarchy and ThreatScore Impact
|
||||
|
||||
#### Hierarchy Structure
|
||||
```
|
||||
Security Framework
|
||||
├── 1. IAM
|
||||
│ ├── 1.1 Authentication
|
||||
│ ├── 1.2 Authorization
|
||||
│ └── 1.3 Privilege Escalation
|
||||
├── 2. Attack Surface
|
||||
│ ├── 2.1 Network
|
||||
│ ├── 2.2 Storage
|
||||
│ └── 2.3 Application
|
||||
├── 3. Logging and Monitoring
|
||||
│ ├── 3.1 Logging
|
||||
│ ├── 3.2 Retention
|
||||
│ └── 3.3 Monitoring
|
||||
└── 4. Encryption
|
||||
├── 4.1 In-Transit
|
||||
└── 4.2 At-Rest
|
||||
|
||||
Example Requirement Structure:
|
||||
├── Pillar: 1. IAM
|
||||
│ ├── Subpillar: 1.1 Authentication
|
||||
│ │ ├── Requirement: MFA Implementation
|
||||
│ │ │ ├── Check 1: Admin accounts use MFA
|
||||
│ │ │ ├── Check 2: Regular users use MFA
|
||||
│ │ │ └── Check 3: Service accounts use MFA
|
||||
│ │ └── [Additional Requirements]
|
||||
│ └── [Additional Subpillars: Authorization, Privilege Escalation]
|
||||
```
|
||||
|
||||
#### Weight and Risk Assignment by Pillar
|
||||
|
||||
Different pillars typically receive different weight and risk assignments based on their security impact:
|
||||
|
||||
| Pillar | Typical Weight Range | Typical Risk Range | Rationale |
|
||||
|--------|---------------------|-------------------|-----------|
|
||||
| 1. IAM | 800-1000 | 4-5 | Critical for access control, high impact if compromised |
|
||||
| 2. Attack Surface | 500-900 | 3-5 | Highly dependent on exposure and criticality across network, storage, and application layers |
|
||||
| 3. Logging and Monitoring | 600-800 | 3-4 | Important for detection and compliance, moderate direct impact |
|
||||
| 4. Encryption | 700-950 | 4-5 | Essential for data protection, regulatory compliance |
|
||||
|
||||
**Subpillar Weight Considerations**:
|
||||
|
||||
- **2.1 Network (Attack Surface)**: 500-800, Risk 3-4 - Network perimeter defense
|
||||
- **2.2 Storage (Attack Surface)**: 600-900, Risk 4-5 - Data exposure impact
|
||||
- **2.3 Application (Attack Surface)**: 400-700, Risk 2-4 - Varies by application criticality
|
||||
|
||||
### Pillar-Specific Scoring Considerations
|
||||
|
||||
#### High-Impact Pillars (1. IAM, 4. Encryption)
|
||||
|
||||
- **Characteristics**: Direct impact on data protection and access control
|
||||
- **ThreatScore Impact**: Failures in these pillars significantly lower overall score
|
||||
- **Weight Strategy**: Assign maximum weights (800-1000) to critical requirements
|
||||
- **Risk Strategy**: Most requirements rated 4-5 due to severe consequences
|
||||
|
||||
#### Variable-Impact Pillar (2. Attack Surface)
|
||||
|
||||
- **Characteristics**: Impact varies significantly across subpillars (Network, Storage, Application)
|
||||
- **ThreatScore Impact**: Depends on specific subpillar and business context
|
||||
- **Weight Strategy**:
|
||||
- 2.1 Network subpillar: 500-800 (perimeter defense importance)
|
||||
- 2.2 Storage subpillar: 600-900 (data exposure risk)
|
||||
- 2.3 Application subpillar: 400-700 (application-specific criticality)
|
||||
- **Risk Strategy**: Wide range (2-5) based on exposure, data sensitivity, and business criticality
|
||||
|
||||
#### Monitoring Pillar (3. Logging and Monitoring)
|
||||
|
||||
- **Characteristics**: Essential for compliance and incident response
|
||||
- **ThreatScore Impact**: Moderate influence, critical for audit requirements
|
||||
- **Weight Strategy**: Consistent weights (600-800) across logging, retention, and monitoring subpillars
|
||||
- **Risk Strategy**: Moderate risk levels (3-4) with emphasis on compliance impact
|
||||
|
||||
### Cross-Pillar Dependencies
|
||||
|
||||
#### Authentication ↔ Authorization (IAM)
|
||||
|
||||
- Strong authentication enables effective authorization controls
|
||||
- Weight both subpillars highly as they're interdependent
|
||||
|
||||
#### Logging ↔ Monitoring (Logging and Monitoring)
|
||||
|
||||
- Logging provides the data that monitoring systems analyze
|
||||
- Balance weights to ensure both data collection and analysis are prioritized
|
||||
|
||||
#### In-Transit ↔ At-Rest (Encryption)
|
||||
|
||||
- Comprehensive data protection requires both encryption types
|
||||
- Consider data flow patterns when assigning relative weights
|
||||
|
||||
### Pillar Coverage in ThreatScore
|
||||
|
||||
#### Complete Coverage Benefits
|
||||
|
||||
- **Comprehensive Assessment**: All security domains represented in score
|
||||
- **Balanced View**: Prevents over-emphasis on single security aspect
|
||||
- **Regulatory Alignment**: Covers requirements across major compliance frameworks
|
||||
|
||||
#### Partial Coverage Considerations
|
||||
|
||||
- **Focused Assessment**: Target specific security domains
|
||||
- **Resource Optimization**: Concentrate efforts on high-priority areas
|
||||
- **Gradual Implementation**: Phase in additional pillars over time
|
||||
|
||||
## Scoring Examples
|
||||
|
||||
### Example 1: Basic Two-Requirement Scenario
|
||||
|
||||
Consider a compliance framework with two requirements:
|
||||
|
||||
**Requirement 1: Encryption at Rest**
|
||||
|
||||
- Findings: 200 PASS, 500 FAIL (total = 700)
|
||||
- Pass Rate: 200/700 = 0.286 (28.6%)
|
||||
- Weight: 500 (High priority - data protection)
|
||||
- Risk Level: 4 (High risk - data exposure)
|
||||
|
||||
**Requirement 2: Access Logging**
|
||||
|
||||
- Findings: 300 PASS, 100 FAIL (total = 400)
|
||||
- Pass Rate: 300/400 = 0.75 (75%)
|
||||
- Weight: 800 (Critical for audit compliance)
|
||||
- Risk Level: 3 (Medium risk - audit trail)
|
||||
|
||||
**Calculation:**
|
||||
```
|
||||
Numerator = (0.286 × 700 × 500 × 4) + (0.75 × 400 × 800 × 3)
|
||||
= (400,400) + (720,000)
|
||||
= 1,120,400
|
||||
|
||||
Denominator = (700 × 500 × 4) + (400 × 800 × 3)
|
||||
= 1,400,000 + 960,000
|
||||
= 2,360,000
|
||||
|
||||
ThreatScore = (1,120,400 / 2,360,000) × 100 = 47.5%
|
||||
```
|
||||
|
||||
### Example 2: Enterprise Scenario with Pillar Structure
|
||||
|
||||
This example demonstrates how pillar organization affects ThreatScore calculation:
|
||||
|
||||
| Pillar | Subpillar | Requirement | Pass | Fail | Total | Weight | Risk | Pass Rate |
|
||||
|--------|-----------|-------------|------|------|-------|--------|------|-----------|
|
||||
| 1. IAM | 1.2 Authorization | Access Controls | 280 | 120 | 400 | 800 | 4 | 70% |
|
||||
| 2. Attack Surface | 2.1 Network | Network Segmentation | 150 | 50 | 200 | 750 | 4 | 75% |
|
||||
| 2. Attack Surface | 2.2 Storage | Backup Security | 200 | 100 | 300 | 600 | 3 | 66.7% |
|
||||
| 3. Logging and Monitoring | 3.1 Logging | Audit Logging | 350 | 50 | 400 | 700 | 3 | 87.5% |
|
||||
| 4. Encryption | 4.2 At-Rest | Encryption | 450 | 50 | 500 | 950 | 5 | 90% |
|
||||
|
||||
**Step-by-step Calculation:**
|
||||
|
||||
1. **Calculate weighted contributions for each requirement:**
|
||||
|
||||
```
|
||||
Numerator = Σ(rate_i × total_i × weight_i × risk_i)
|
||||
```
|
||||
|
||||
- **Access Controls (1.2 Authorization)**: 0.70 × 400 × 800 × 4 = 896,000
|
||||
- **Network Segmentation (2.1 Network)**: 0.75 × 200 × 750 × 4 = 450,000
|
||||
- **Backup Security (2.2 Storage)**: 0.667 × 300 × 600 × 3 = 360,060
|
||||
- **Audit Logging (3.1 Logging)**: 0.875 × 400 × 700 × 3 = 735,000
|
||||
- **Encryption (4.2 At-Rest)**: 0.90 × 500 × 950 × 5 = 2,137,500
|
||||
|
||||
2. **Sum numerator:** 2,137,500 + 896,000 + 735,000 + 360,060 + 450,000 = **4,578,560**
|
||||
|
||||
3. **Calculate total weights for each requirement:**
|
||||
|
||||
```
|
||||
Denominator = Σ(total_i × weight_i × risk_i)
|
||||
```
|
||||
|
||||
- **Access Controls (1.2 Authorization)**: 400 × 800 × 4 = 1,280,000
|
||||
- **Network Segmentation (2.1 Network)**: 200 × 750 × 4 = 600,000
|
||||
- **Backup Security (2.2 Storage)**: 300 × 600 × 3 = 540,000
|
||||
- **Audit Logging (3.1 Logging)**: 400 × 700 × 3 = 840,000
|
||||
- **Encryption (4.2 At-Rest)**: 500 × 950 × 5 = 2,375,000
|
||||
|
||||
4. **Sum denominator:** 2,375,000 + 1,280,000 + 840,000 + 540,000 + 600,000 = **5,635,000**
|
||||
|
||||
5. **Final ThreatScore calculation:**
|
||||
|
||||
```
|
||||
ThreatScore = (Numerator / Denominator) × 100
|
||||
ThreatScore = (4,578,560 / 5,635,000) × 100 = 81.2%
|
||||
```
|
||||
|
||||
**Pillar-Level Analysis:**
|
||||
|
||||
- **1. IAM pillar (1.2 Authorization)**: Significant impact despite lower pass rate (70%) due to high weight (800)
|
||||
- **2. Attack Surface pillar (2.1 Network)**: Strong performance (75%) with high weight (750) balances the score
|
||||
- **2. Attack Surface pillar (2.2 Storage)**: Lowest performance (66.7%) but limited impact due to moderate weight (600)
|
||||
- **3. Logging and Monitoring pillar (3.1 Logging)**: Moderate contribution with good performance (87.5%)
|
||||
- **4. Encryption pillar (4.2 At-Rest)**: Highest contribution due to maximum weight (950) and risk (5)
|
||||
|
||||
### Example 3: Multi-Pillar Comprehensive Scenario
|
||||
|
||||
|
||||
| Pillar | Subpillar | Requirement | Pass | Fail | Weight | Risk | Pass Rate |
|
||||
|--------|-----------|-------------|------|------|--------|------|-----------|
|
||||
| 1. IAM | 1.1 Authentication | MFA Implementation | 180 | 20 | 900 | 5 | 90% |
|
||||
| 1. IAM | 1.2 Authorization | Least Privilege Access | 150 | 50 | 850 | 4 | 75% |
|
||||
| 1. IAM | 1.3 Privilege Escalation | Admin Account Controls | 95 | 5 | 950 | 5 | 95% |
|
||||
| 2. Attack Surface | 2.1 Network | Firewall Configuration | 400 | 100 | 600 | 3 | 80% |
|
||||
| 2. Attack Surface | 2.1 Network | Public Endpoint Security | 80 | 20 | 700 | 4 | 80% |
|
||||
| 2. Attack Surface | 2.2 Storage | Data Classification | 300 | 100 | 650 | 3 | 75% |
|
||||
| 2. Attack Surface | 2.3 Application | Input Validation | 150 | 50 | 500 | 3 | 75% |
|
||||
| 3. Logging and Monitoring | 3.1 Logging | Transaction Logging | 500 | 50 | 750 | 3 | 90.9% |
|
||||
| 3. Logging and Monitoring | 3.3 Monitoring | Real-time Alerts | 200 | 50 | 700 | 4 | 80% |
|
||||
| 4. Encryption | 4.2 At-Rest | Database Encryption | 300 | 20 | 900 | 5 | 93.8% |
|
||||
| 4. Encryption | 4.1 In-Transit | API/Web Encryption | 250 | 10 | 800 | 4 | 96.2% |
|
||||
|
||||
**Pillar Performance Summary**:
|
||||
|
||||
- **1. IAM Pillar Average**: ~85% (weighted by findings across Authentication, Authorization, and Privilege Escalation subpillars)
|
||||
- **2. Attack Surface Pillar Average**: ~77% (weighted across Network, Storage, and Application subpillars)
|
||||
- 2.1 Network subpillar: ~80% average
|
||||
- 2.2 Storage subpillar: 75%
|
||||
- 2.3 Application subpillar: 75%
|
||||
- **3. Logging and Monitoring Average**: ~87% (weighted by findings across Logging and Monitoring subpillars)
|
||||
- **4. Encryption Pillar Average**: ~94% (weighted by findings across In-Transit and At-Rest subpillars)
|
||||
|
||||
**Overall ThreatScore**: ~86.3%
|
||||
|
||||
This comprehensive example demonstrates how:
|
||||
|
||||
- High-performing, high-weight pillars (4. Encryption, 1. IAM) significantly boost the score
|
||||
- The 2. Attack Surface pillar shows how diverse subpillars (Network, Storage, Application) are aggregated
|
||||
- Multiple requirements within pillars provide detailed granular assessment
|
||||
- Cross-pillar balance prevents single points of failure in security posture
|
||||
|
||||
### Example 4: Impact of Parameter Changes
|
||||
|
||||
Using the scenario from Example 2 (base ThreatScore of 81.2%), let's see how parameter changes affect the score:
|
||||
|
||||
#### Scenario A: Decrease Encryption Risk Level
|
||||
|
||||
Change Encryption risk from 5 to 3:
|
||||
|
||||
- **New ThreatScore: 79.5%** (decrease of about 1.8 points)
|
||||
- **Impact**: Lower risk weighting reduces the influence of high-performing critical controls
|
||||
|
||||
#### Scenario B: Improve Access Controls Pass Rate
|
||||
|
||||
Change Access Controls from 70% to 90% pass rate:
|
||||
|
||||
- **New ThreatScore: 85.8%** (increase of about 4.5 points)
|
||||
- **Impact**: Improving performance on high-weight requirements has significant score impact
|
||||
|
||||
#### Scenario C: Add New Low-Weight Requirement
|
||||
|
||||
Add "Documentation Completeness" (50 PASS, 10 FAIL, weight=100, risk=1):
|
||||
|
||||
- **New ThreatScore: 81.3%** (change of less than 0.1 points)
|
||||
- **Impact**: Low-weight requirements have minimal impact on overall score
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Edge Cases and Special Conditions
|
||||
|
||||
#### Zero Findings Scenario
|
||||
When a requirement has `total_i = 0` (no findings):
|
||||
|
||||
- **Behavior**: Requirement is completely excluded from calculation
|
||||
- **Rationale**: No evidence means no contribution to confidence in the score
|
||||
- **Impact**: Other requirements receive proportionally more influence
|
||||
|
||||
#### Perfect Score Scenario
|
||||
When all requirements have 100% pass rate:
|
||||
|
||||
- **Result**: ThreatScore = 100%
|
||||
- **Interpretation**: All implemented security checks are passing
|
||||
|
||||
#### Zero Pass Rate Scenario
|
||||
When all requirements have 0% pass rate:
|
||||
|
||||
- **Result**: ThreatScore = 0%
|
||||
- **Interpretation**: Critical security failures across all requirements
|
||||
|
||||
#### Single Requirement Framework
|
||||
For frameworks with only one requirement:
|
||||
|
||||
- **Formula simplification**: ThreatScore = pass_rate × 100
|
||||
- **Impact**: Weight and risk values become irrelevant for score calculation
|
||||
|
||||
### Performance Considerations
|
||||
|
||||
#### Computational Complexity
|
||||
- **Time Complexity**: O(n) where n = number of requirements
|
||||
- **Space Complexity**: O(1) - constant space for accumulation
|
||||
- **Scalability**: Efficiently handles frameworks with thousands of requirements
|
||||
|
||||
#### Calculation Precision
|
||||
- **Floating Point**: Use double precision for intermediate calculations
|
||||
- **Rounding**: Final score rounded to 1 decimal place for display
|
||||
- **Overflow Protection**: Validate that weight × risk × total values don't exceed system limits
|
||||
|
||||
### Data Requirements
|
||||
|
||||
#### Minimum Data Set
|
||||
For each requirement, the following data must be available:
|
||||
|
||||
- **pass_count**: Number of PASS findings (integer ≥ 0)
|
||||
- **fail_count**: Number of FAIL findings (integer ≥ 0)
|
||||
- **weight**: Business importance (integer 1-1000)
|
||||
- **risk**: Risk level (integer 1-5)
|
||||
|
||||
#### Data Validation Rules
|
||||
```
|
||||
total_i = pass_i + fail_i
|
||||
rate_i = pass_i / total_i (when total_i > 0)
|
||||
1 ≤ weight_i ≤ 1000
|
||||
1 ≤ risk_i ≤ 5
|
||||
```
|
||||
|
||||
#### Handling Invalid Data
|
||||
- **Negative values**: Treat as 0 and log warning
|
||||
- **Out-of-range weights/risk**: Clamp to valid range and log warning
|
||||
- **Missing data**: Exclude requirement from calculation and log warning
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Monitoring and Trending
|
||||
|
||||
1. **Establish Baseline**
|
||||
- Record initial ThreatScore after implementing measurement
|
||||
- Set realistic improvement targets based on organizational capacity
|
||||
- Track score changes over time to identify trends
|
||||
|
||||
2. **Regular Reporting**
|
||||
- Generate monthly ThreatScore reports for stakeholders
|
||||
- Highlight significant score changes and their causes
|
||||
- Include requirement-level breakdowns for detailed analysis
|
||||
|
||||
3. **Continuous Improvement**
|
||||
- Use score trends to identify systematic issues
|
||||
- Correlate score changes with security incidents or changes
|
||||
- Adjust weights and risk levels based on lessons learned
|
||||
|
||||
@@ -1,5 +1,12 @@
|
||||
# GCP Authentication in Prowler
|
||||
|
||||
Prowler for Google Cloud supports multiple authentication methods. To use a specific method, configure the appropriate credentials during execution:
|
||||
|
||||
- [**User Credentials** (Application Default Credentials)](#application-default-credentials-user-credentials)
|
||||
- [**Service Account Key File**](#service-account-key-file)
|
||||
- [**Access Token**](#access-token)
|
||||
- [**Service Account Impersonation**](#service-account-impersonation)
|
||||
|
||||
## Required Permissions
|
||||
|
||||
Prowler for Google Cloud requires the following permissions:
|
||||
@@ -33,28 +40,92 @@ At least one project must have the following configurations:
|
||||
```
|
||||
|
||||
???+ note
|
||||
`prowler` will scan the GCP project associated with the credentials.
|
||||
|
||||
## Credentials lookup order
|
||||
|
||||
Prowler follows the same credential search process as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order), checking credentials in this order:
|
||||
|
||||
1. [`GOOGLE_APPLICATION_CREDENTIALS` environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
|
||||
2. [`CLOUDSDK_AUTH_ACCESS_TOKEN` + optional `GOOGLE_CLOUD_PROJECT`](https://cloud.google.com/sdk/gcloud/reference/auth/print-access-token)
|
||||
3. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
4. [Attached service account (e.g., Cloud Run, GCE, Cloud Functions)](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
???+ note
|
||||
The credentials must belong to a user or service account with the necessary permissions.
|
||||
To ensure full access, assign the `roles/viewer` IAM role to the identity being used.
|
||||
|
||||
???+ note
|
||||
Prowler will use the enabled Google Cloud APIs to get the information needed to perform the checks.
|
||||
Prowler will scan the GCP project associated with the credentials.
|
||||
|
||||
|
||||
## Application Default Credentials (User Credentials)
|
||||
|
||||
This method uses the Google Cloud CLI to authenticate and is suitable for development and testing environments.
|
||||
|
||||
## Using an Access Token
|
||||
### Setup Application Default Credentials
|
||||
|
||||
1. In the [GCP Console](https://console.cloud.google.com/), click on "Activate Cloud Shell"
|
||||
|
||||

|
||||
|
||||
2. Click "Authorize Cloud Shell"
|
||||
|
||||

|
||||
|
||||
3. Run the following command:
|
||||
|
||||
```bash
|
||||
gcloud auth application-default login
|
||||
```
|
||||
|
||||
- Type `Y` when prompted
|
||||
|
||||

|
||||
|
||||
4. Open the authentication URL provided in a browser and select your Google account
|
||||
|
||||

|
||||
|
||||
5. Follow the steps to obtain the authentication code
|
||||
|
||||

|
||||
|
||||
6. Paste the authentication code back in Cloud Shell
|
||||
|
||||

|
||||
|
||||
7. Use `cat <file_name>` to view the temporary credentials file
|
||||
|
||||

|
||||
|
||||
8. Extract the following values for Prowler Cloud/App:
|
||||
|
||||
- `client_id`
|
||||
- `client_secret`
|
||||
- `refresh_token`
|
||||
|
||||

|
||||
|
||||
### Using with Prowler CLI
|
||||
|
||||
Once application default credentials are set up, run Prowler directly:
|
||||
|
||||
```console
|
||||
prowler gcp --project-ids <project-id>
|
||||
```
|
||||
|
||||
## Service Account Key File
|
||||
|
||||
This method uses a service account with a downloaded key file for authentication.
|
||||
|
||||
### Create Service Account and Key
|
||||
|
||||
1. Go to the [Service Accounts page](https://console.cloud.google.com/iam-admin/serviceaccounts) in the GCP Console
|
||||
2. Click "Create Service Account"
|
||||
3. Fill in the service account details and click "Create and Continue"
|
||||
4. Grant the service account the "Viewer" (`roles/viewer`) role
|
||||
5. Click "Done"
|
||||
6. Find your service account in the list and click on it
|
||||
7. Go to the "Keys" tab
|
||||
8. Click "Add Key" > "Create new key"
|
||||
9. Select "JSON" and click "Create"
|
||||
10. Save the downloaded key file securely
|
||||
|
||||
### Using with Prowler CLI
|
||||
|
||||
Set the `GOOGLE_APPLICATION_CREDENTIALS` environment variable:
|
||||
|
||||
```console
|
||||
export GOOGLE_APPLICATION_CREDENTIALS="/path/to/service-account-key.json"
|
||||
prowler gcp --project-ids <project-id>
|
||||
```
|
||||
|
||||
## Access Token
|
||||
|
||||
For existing access tokens (e.g., generated with `gcloud auth print-access-token`), run Prowler with:
|
||||
|
||||
@@ -69,10 +140,7 @@ prowler gcp --project-ids <project-id>
|
||||
export GOOGLE_CLOUD_PROJECT=<project-id>
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
## Impersonating a GCP Service Account
|
||||
## Service Account Impersonation
|
||||
|
||||
To impersonate a GCP service account, use the `--impersonate-service-account` argument followed by the service account email:
|
||||
|
||||
@@ -81,3 +149,13 @@ prowler gcp --impersonate-service-account <service-account-email>
|
||||
```
|
||||
|
||||
This command leverages the default credentials to impersonate the specified service account.
|
||||
|
||||
### Prerequisites for Impersonation
|
||||
|
||||
The identity running Prowler must have the following permission on the target service account:
|
||||
|
||||
- `roles/iam.serviceAccountTokenCreator`
|
||||
|
||||
Or the more specific permission:
|
||||
|
||||
- `iam.serviceAccounts.generateAccessToken`
|
||||
|
||||
@@ -1,105 +1,121 @@
|
||||
# Getting Started with GCP on Prowler Cloud/App
|
||||
# Getting Started With GCP on Prowler
|
||||
|
||||
Set up your GCP project to enable security scanning using Prowler Cloud/App.
|
||||
## Prowler App
|
||||
|
||||
## Requirements
|
||||
|
||||
To configure your GCP project, you’ll need:
|
||||
|
||||
1. Get the `Project ID`
|
||||
2. Access to Prowler Cloud/App
|
||||
3. Configure authentication in GCP:
|
||||
|
||||
3.1 Retrieve credentials from Google Cloud
|
||||
|
||||
4. Add the credentials to Prowler Cloud/App
|
||||
|
||||
---
|
||||
|
||||
## Step 1: Get the Project ID
|
||||
### Step 1: Get the GCP Project ID
|
||||
|
||||
1. Go to the [GCP Console](https://console.cloud.google.com/)
|
||||
2. Locate your Project ID on the welcome screen
|
||||
2. Locate the Project ID on the welcome screen
|
||||
|
||||

|
||||
|
||||
---
|
||||
### Step 2: Access Prowler Cloud or Prowler App
|
||||
|
||||
## Step 2: Access Prowler Cloud/App
|
||||
|
||||
1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](../prowler-app.md)
|
||||
2. Navigate to `Configuration` > `Cloud Providers`
|
||||
1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](../prowler-app.md)
|
||||
2. Go to "Configuration" > "Cloud Providers"
|
||||
|
||||

|
||||
|
||||
3. Click `Add Cloud Provider`
|
||||
3. Click "Add Cloud Provider"
|
||||
|
||||

|
||||
|
||||
4. Select `Google Cloud Platform`
|
||||
4. Select "Google Cloud Platform"
|
||||
|
||||

|
||||
|
||||
5. Add the Project ID and optionally provide a provider alias, then click `Next`
|
||||
5. Add the Project ID and optionally provide a provider alias, then click "Next"
|
||||
|
||||

|
||||
|
||||
---
|
||||
### Step 3: Set Up GCP Authentication
|
||||
|
||||
## Step 3: Configure Authentication in GCP
|
||||
Choose the preferred authentication mode before proceeding:
|
||||
|
||||
### Retrieve Credentials from Google Cloud
|
||||
**User Credentials (Application Default Credentials)**
|
||||
|
||||
1. In the [GCP Console](https://console.cloud.google.com/), click on `Activate Cloud Shell`
|
||||
* Quick scan as current user
|
||||
* Uses Google Cloud CLI authentication
|
||||
* Credentials may time out
|
||||
|
||||

|
||||
**Service Account Key File**
|
||||
|
||||
2. Click `Authorize Cloud Shell`
|
||||
* Authenticates as a service identity
|
||||
* Stable and auditable
|
||||
* Recommended for production
|
||||
|
||||

|
||||
For detailed instructions on how to set up authentication, see [Authentication](./authentication.md).
|
||||
|
||||
3. Run the following command:
|
||||
6. Once credentials are configured, return to Prowler App and enter the required values:
|
||||
|
||||
```bash
|
||||
gcloud auth application-default login
|
||||
```
|
||||
For "Service Account Key":
|
||||
|
||||
- Type `Y` when prompted
|
||||
- `Service Account Key JSON`
|
||||
|
||||

|
||||
|
||||
4. Open the authentication URL provided in a browser and select your Google account
|
||||
|
||||

|
||||
|
||||
5. Follow the steps to obtain the authentication code
|
||||
|
||||

|
||||
|
||||
6. Paste the authentication code back in Cloud Shell
|
||||
|
||||

|
||||
|
||||
7. Use `cat <file_name>` to view the temporary credentials file
|
||||
|
||||

|
||||
|
||||
8. Extract the following values for Prowler Cloud/App:
|
||||
For "Application Default Credentials":
|
||||
|
||||
- `client_id`
|
||||
- `client_secret`
|
||||
- `refresh_token`
|
||||
|
||||

|
||||

|
||||
|
||||
7. Click "Next", then "Launch Scan"
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Step 4: Add Credentials to Prowler Cloud/App
|
||||
## Prowler CLI
|
||||
|
||||
1. Go back to Prowler Cloud/App and enter the required credentials, then click `Next`
|
||||
### Credentials Lookup Order
|
||||
|
||||

|
||||
Prowler follows the same credential search process as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order), checking credentials in this order:
|
||||
|
||||
2. Click `Launch Scan` to begin scanning your GCP environment
|
||||
1. [`GOOGLE_APPLICATION_CREDENTIALS` environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
|
||||
2. [`CLOUDSDK_AUTH_ACCESS_TOKEN` + optional `GOOGLE_CLOUD_PROJECT`](https://cloud.google.com/sdk/gcloud/reference/auth/print-access-token)
|
||||
3. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
4. [Attached service account (e.g., Cloud Run, GCE, Cloud Functions)](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||

|
||||
???+ note
|
||||
The credentials must belong to a user or service account with the necessary permissions.
|
||||
For detailed instructions on how to set the permissions, see [Authentication > Required Permissions](./authentication.md#required-permissions).
|
||||
|
||||
???+ note
|
||||
Prowler will use the enabled Google Cloud APIs to get the information needed to perform the checks.
|
||||
|
||||
### Configure GCP Credentials
|
||||
|
||||
To authenticate with GCP, use one of the following methods:
|
||||
|
||||
```console
|
||||
gcloud auth application-default login
|
||||
```
|
||||
|
||||
or set the credentials file path:
|
||||
|
||||
```console
|
||||
export GOOGLE_APPLICATION_CREDENTIALS="/path/to/credentials.json"
|
||||
```
|
||||
|
||||
These credentials must belong to a user or service account with the necessary permissions to perform security checks.
|
||||
|
||||
For more authentication details, see the [Authentication](./authentication.md) page.
|
||||
|
||||
### Project Specification
|
||||
|
||||
To scan specific projects, specify them with the following command:
|
||||
|
||||
```console
|
||||
prowler gcp --project-ids <project-id-1> <project-id-2>
|
||||
```
|
||||
|
||||
### Service Account Impersonation
|
||||
|
||||
For service account impersonation, use the `--impersonate-service-account` flag:
|
||||
|
||||
```console
|
||||
prowler gcp --impersonate-service-account <service-account-email>
|
||||
```
|
||||
|
||||
More details on authentication methods in the [Authentication](./authentication.md) page.
|
||||
|
||||
@@ -1,47 +1,211 @@
|
||||
# Github Authentication in Prowler
|
||||
# GitHub Authentication in Prowler
|
||||
|
||||
Prowler supports multiple methods to [authenticate with GitHub](https://docs.github.com/en/rest/authentication/authenticating-to-the-rest-api). These include:
|
||||
|
||||
- **Personal Access Token (PAT)**
|
||||
- **OAuth App Token**
|
||||
- **GitHub App Credentials**
|
||||
- [Personal Access Token (PAT)](./authentication.md#personal-access-token-pat)
|
||||
- [OAuth App Token](./authentication.md#oauth-app-token)
|
||||
- [GitHub App Credentials](./authentication.md#github-app-credentials)
|
||||
|
||||
This flexibility enables scanning and analysis of GitHub accounts, including repositories, organizations, and applications, using the method that best suits the use case.
|
||||
|
||||
## Supported Login Methods
|
||||
## Personal Access Token (PAT)
|
||||
|
||||
Here are the available login methods and their respective flags:
|
||||
Personal Access Tokens provide the simplest GitHub authentication method, but they can only access resources owned by a single user or organization.
|
||||
|
||||
### Personal Access Token (PAT)
|
||||
???+ warning "Classic Tokens Deprecated"
|
||||
GitHub has deprecated Personal Access Tokens (classic) in favor of fine-grained Personal Access Tokens. We recommend using fine-grained tokens as they provide better security through more granular permissions and resource-specific access control.
|
||||
|
||||
Use this method by providing your personal access token directly.
|
||||
#### **Option 1: Create a Fine-Grained Personal Access Token (Recommended)**
|
||||
|
||||
```console
|
||||
prowler github --personal-access-token pat
|
||||
```
|
||||
1. **Navigate to GitHub Settings**
|
||||
- Open [GitHub](https://github.com) and sign in
|
||||
- Click the profile picture in the top right corner
|
||||
- Select "Settings" from the dropdown menu
|
||||
|
||||
### OAuth App Token
|
||||
2. **Access Developer Settings**
|
||||
- Scroll down the left sidebar
|
||||
- Click "Developer settings"
|
||||
|
||||
Authenticate using an OAuth app token.
|
||||
3. **Generate Fine-Grained Token**
|
||||
- Click "Personal access tokens"
|
||||
- Select "Fine-grained tokens"
|
||||
- Click "Generate new token"
|
||||
|
||||
```console
|
||||
prowler github --oauth-app-token oauth_token
|
||||
```
|
||||
4. **Configure Token Settings**
|
||||
- **Token name**: Give your token a descriptive name (e.g., "Prowler Security Scanner")
|
||||
- **Expiration**: Set an appropriate expiration date (recommended: 90 days or less)
|
||||
- **Repository access**: Choose "All repositories" or "Only select repositories" based on your needs
|
||||
|
||||
### GitHub App Credentials
|
||||
Use GitHub App credentials by specifying the App ID and the private key path.
|
||||
???+ note "Public repositories"
|
||||
Even if you select 'Only select repositories', the token will have access to the public repositories that you own or are a member of.
|
||||
|
||||
```console
|
||||
prowler github --github-app-id app_id --github-app-key-path app_key_path
|
||||
```
|
||||
5. **Configure Token Permissions**
|
||||
To enable Prowler functionality, configure the following permissions:
|
||||
|
||||
### Automatic Login Method Detection
|
||||
- **Repository permissions:**
|
||||
- **Administration**: Read-only access
|
||||
- **Contents**: Read-only access
|
||||
- **Metadata**: Read-only access
|
||||
- **Pull requests**: Read-only access
|
||||
|
||||
If no login method is explicitly provided, Prowler will automatically attempt to authenticate using environment variables in the following order of precedence:
|
||||
- **Organization permissions:**
|
||||
- **Administration**: Read-only access
|
||||
- **Members**: Read-only access
|
||||
|
||||
1. `GITHUB_PERSONAL_ACCESS_TOKEN`
|
||||
2. `GITHUB_OAUTH_APP_TOKEN`
|
||||
3. `GITHUB_APP_ID` and `GITHUB_APP_KEY` (where the key is the content of the private key file)
|
||||
- **Account permissions:**
|
||||
- **Email addresses**: Read-only access
|
||||
|
||||
6. **Copy and Store the Token**
|
||||
- Copy the generated token immediately (GitHub displays tokens only once)
|
||||
- Store tokens securely using environment variables
|
||||
|
||||

|
||||
|
||||
#### **Option 2: Create a Classic Personal Access Token (Not Recommended)**
|
||||
|
||||
???+ warning "Security Risk"
|
||||
Classic tokens provide broad permissions that may exceed what Prowler actually needs. Use fine-grained tokens instead for better security.
|
||||
|
||||
1. **Navigate to GitHub Settings**
|
||||
- Open [GitHub](https://github.com) and sign in
|
||||
- Click the profile picture in the top right corner
|
||||
- Select "Settings" from the dropdown menu
|
||||
|
||||
2. **Access Developer Settings**
|
||||
- Scroll down the left sidebar
|
||||
- Click "Developer settings"
|
||||
|
||||
3. **Generate Classic Token**
|
||||
- Click "Personal access tokens"
|
||||
- Select "Tokens (classic)"
|
||||
- Click "Generate new token"
|
||||
|
||||
4. **Configure Token Permissions**
|
||||
To enable Prowler functionality, configure the following scopes:
|
||||
- `repo`: Full control of private repositories (includes `repo:status` and `repo:contents`)
|
||||
- `read:org`: Read organization and team membership
|
||||
- `read:user`: Read user profile data
|
||||
- `security_events`: Access security events (secret scanning and Dependabot alerts)
|
||||
- `read:enterprise`: Read enterprise data (if using GitHub Enterprise)
|
||||
|
||||
5. **Copy and Store the Token**
|
||||
- Copy the generated token immediately (GitHub displays tokens only once)
|
||||
- Store tokens securely using environment variables
|
||||
|
||||
## OAuth App Token
|
||||
|
||||
OAuth Apps enable applications to act on behalf of users with explicit consent.
|
||||
|
||||
### Create an OAuth App Token
|
||||
|
||||
1. **Navigate to Developer Settings**
|
||||
- Open GitHub Settings → Developer settings
|
||||
- Click "OAuth Apps"
|
||||
|
||||
2. **Register New Application**
|
||||
- Click "New OAuth App"
|
||||
- Complete the required fields:
|
||||
- **Application name**: Descriptive application name
|
||||
- **Homepage URL**: Application homepage
|
||||
- **Authorization callback URL**: User redirection URL after authorization
|
||||
|
||||
3. **Obtain Authorization Code**
|
||||
- Request authorization code (replace `{app_id}` with the application ID):
|
||||
```
|
||||
https://github.com/login/oauth/authorize?client_id={app_id}
|
||||
```
|
||||
|
||||
4. **Exchange Code for Token**
|
||||
- Exchange authorization code for access token (replace `{app_id}`, `{secret}`, and `{code}`):
|
||||
```
|
||||
https://github.com/login/oauth/access_token?code={code}&client_id={app_id}&client_secret={secret}
|
||||
```
|
||||
|
||||
## GitHub App Credentials
|
||||
GitHub Apps provide the recommended integration method for accessing multiple repositories or organizations.
|
||||
|
||||
### Create a GitHub App
|
||||
|
||||
1. **Navigate to Developer Settings**
|
||||
- Open GitHub Settings → Developer settings
|
||||
- Click "GitHub Apps"
|
||||
|
||||
2. **Create New GitHub App**
|
||||
- Click "New GitHub App"
|
||||
- Complete the required fields:
|
||||
- **GitHub App name**: Unique application name
|
||||
- **Homepage URL**: Application homepage
|
||||
- **Webhook URL**: Webhook payload URL (optional)
|
||||
- **Permissions**: Application permission requirements
|
||||
|
||||
3. **Configure Permissions**
|
||||
To enable Prowler functionality, configure these permissions:
|
||||
- **Repository permissions**:
|
||||
- Contents (Read)
|
||||
- Metadata (Read)
|
||||
- Pull requests (Read)
|
||||
- **Organization permissions**:
|
||||
- Members (Read)
|
||||
- Administration (Read)
|
||||
- **Account permissions**:
|
||||
- Email addresses (Read)
|
||||
|
||||
4. **Where can this GitHub App be installed?**
|
||||
- Select "Any account" to be able to install the GitHub App in any organization.
|
||||
|
||||
5. **Generate Private Key**
|
||||
- Scroll to the "Private keys" section after app creation
|
||||
- Click "Generate a private key"
|
||||
- Download the `.pem` file and store securely
|
||||
|
||||
6. **Record App ID**
|
||||
- Locate the App ID at the top of the GitHub App settings page
|
||||
|
||||
### Install the GitHub App
|
||||
|
||||
1. **Install Application**
|
||||
- Navigate to GitHub App settings
|
||||
- Click "Install App" in the left sidebar
|
||||
- Select the target account/organization
|
||||
- Choose specific repositories or select "All repositories"
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Security Considerations
|
||||
|
||||
Implement the following security measures:
|
||||
|
||||
- **Secure Credential Storage**: Store credentials using environment variables instead of hardcoding tokens
|
||||
- **Secrets Management**: Use dedicated secrets management systems in production environments
|
||||
- **Regular Token Rotation**: Rotate tokens and keys regularly
|
||||
- **Least Privilege Principle**: Grant only minimum required permissions
|
||||
- **Permission Auditing**: Review and audit permissions regularly
|
||||
- **Token Expiration**: Set appropriate expiration times for tokens
|
||||
- **Usage Monitoring**: Monitor token usage and revoke unused tokens
|
||||
|
||||
### Authentication Method Selection
|
||||
|
||||
Choose the appropriate method based on use case:
|
||||
|
||||
- **Personal Access Token**: Individual use, testing, or simple automation
|
||||
- **OAuth App Token**: Applications requiring user consent and delegation
|
||||
- **GitHub App**: Production integrations, especially for organizations
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
|
||||
### Insufficient Permissions
|
||||
- Verify token/app has necessary scopes/permissions
|
||||
- Check organization restrictions on third-party applications
|
||||
|
||||
### Token Expiration
|
||||
- Confirm token has not expired
|
||||
- Verify fine-grained tokens have correct resource access
|
||||
|
||||
### Rate Limiting
|
||||
- GitHub implements API call rate limits
|
||||
- Consider GitHub Apps for higher rate limits
|
||||
|
||||
### Organization Settings
|
||||
- Some organizations restrict third-party applications
|
||||
- Contact organization administrator if access is denied
|
||||
|
||||
???+ note
|
||||
Ensure the corresponding environment variables are set up before running Prowler for automatic detection when not specifying the login method.
|
||||
|
||||
@@ -1,264 +1,90 @@
|
||||
# Getting Started with GitHub
|
||||
|
||||
This guide explains how to set up authentication with GitHub for Prowler. The documentation covers credential retrieval processes for each supported authentication method.
|
||||
## Prowler App
|
||||
|
||||
## Prerequisites
|
||||
<iframe width="560" height="380" src="https://www.youtube-nocookie.com/embed/9ETI84Xpu2g" title="Prowler Cloud Onboarding Github" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowfullscreen="1"></iframe>
|
||||
|
||||
- GitHub account
|
||||
- Token creation permissions (organization-level access requires admin permissions)
|
||||
> Walkthrough video onboarding a GitHub Account using GitHub App.
|
||||
|
||||
## Authentication Methods
|
||||
### Step 1: Access Prowler Cloud/App
|
||||
|
||||
### 1. Personal Access Token (PAT)
|
||||
1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](../prowler-app.md)
|
||||
2. Go to "Configuration" > "Cloud Providers"
|
||||
|
||||
Personal Access Tokens provide the simplest GitHub authentication method, but they can only access resources owned by a single user or organization.
|
||||

|
||||
|
||||
???+ warning "Classic Tokens Deprecated"
|
||||
GitHub has deprecated Personal Access Tokens (classic) in favor of fine-grained Personal Access Tokens. We recommend using fine-grained tokens as they provide better security through more granular permissions and resource-specific access control.
|
||||
3. Click "Add Cloud Provider"
|
||||
|
||||
#### **Option 1: Create a Fine-Grained Personal Access Token (Recommended)**
|
||||

|
||||
|
||||
1. **Navigate to GitHub Settings**
|
||||
- Open [GitHub](https://github.com) and sign in
|
||||
- Click the profile picture in the top right corner
|
||||
- Select "Settings" from the dropdown menu
|
||||
4. Select "GitHub"
|
||||
|
||||
2. **Access Developer Settings**
|
||||
- Scroll down the left sidebar
|
||||
- Click "Developer settings"
|
||||

|
||||
|
||||
3. **Generate Fine-Grained Token**
|
||||
- Click "Personal access tokens"
|
||||
- Select "Fine-grained tokens"
|
||||
- Click "Generate new token"
|
||||
5. Add the GitHub Account ID (username or organization name) and an optional alias, then click "Next"
|
||||
|
||||
4. **Configure Token Settings**
|
||||
- **Token name**: Give your token a descriptive name (e.g., "Prowler Security Scanner")
|
||||
- **Expiration**: Set an appropriate expiration date (recommended: 90 days or less)
|
||||
- **Repository access**: Choose "All repositories" or "Only select repositories" based on your needs
|
||||

|
||||
|
||||
???+ note "Public repositories"
|
||||
Even if you select 'Only select repositories', the token will have access to the public repositories that you own or are a member of.
|
||||
### Step 2: Choose the preferred authentication method
|
||||
|
||||
5. **Configure Token Permissions**
|
||||
To enable Prowler functionality, configure the following permissions:
|
||||
6. Choose the preferred authentication method:
|
||||
|
||||
- **Repository permissions:**
|
||||
- **Administration**: Read-only access
|
||||
- **Contents**: Read-only access
|
||||
- **Metadata**: Read-only access
|
||||
- **Pull requests**: Read-only access
|
||||

|
||||
|
||||
- **Organization permissions:**
|
||||
- **Administration**: Read-only access
|
||||
- **Members**: Read-only access
|
||||
7. Configure the authentication method:
|
||||
|
||||
- **Account permissions:**
|
||||
- **Email addresses**: Read-only access
|
||||
=== "Personal Access Token"
|
||||

|
||||
|
||||
6. **Copy and Store the Token**
|
||||
- Copy the generated token immediately (GitHub displays tokens only once)
|
||||
- Store tokens securely using environment variables
|
||||
For more details on how to create a Personal Access Token, see [Authentication > Personal Access Token](./authentication.md#personal-access-token-pat).
|
||||
|
||||

|
||||
=== "OAuth App Token"
|
||||
|
||||
#### **Option 2: Create a Classic Personal Access Token (Not Recommended)**
|
||||

|
||||
|
||||
???+ warning "Security Risk"
|
||||
Classic tokens provide broad permissions that may exceed what Prowler actually needs. Use fine-grained tokens instead for better security.
|
||||
For more details on how to create an OAuth App Token, see [Authentication > OAuth App Token](./authentication.md#oauth-app-token).
|
||||
|
||||
1. **Navigate to GitHub Settings**
|
||||
- Open [GitHub](https://github.com) and sign in
|
||||
- Click the profile picture in the top right corner
|
||||
- Select "Settings" from the dropdown menu
|
||||
=== "GitHub App"
|
||||
|
||||
2. **Access Developer Settings**
|
||||
- Scroll down the left sidebar
|
||||
- Click "Developer settings"
|
||||

|
||||
|
||||
3. **Generate Classic Token**
|
||||
- Click "Personal access tokens"
|
||||
- Select "Tokens (classic)"
|
||||
- Click "Generate new token"
|
||||
For more details on how to create a GitHub App, see [Authentication > GitHub App](./authentication.md#github-app-credentials).
|
||||
|
||||
4. **Configure Token Permissions**
|
||||
To enable Prowler functionality, configure the following scopes:
|
||||
- `repo`: Full control of private repositories (includes `repo:status` and `repo:contents`)
|
||||
- `read:org`: Read organization and team membership
|
||||
- `read:user`: Read user profile data
|
||||
- `security_events`: Access security events (secret scanning and Dependabot alerts)
|
||||
- `read:enterprise`: Read enterprise data (if using GitHub Enterprise)
|
||||
|
||||
5. **Copy and Store the Token**
|
||||
- Copy the generated token immediately (GitHub displays tokens only once)
|
||||
- Store tokens securely using environment variables
|
||||
## Prowler CLI
|
||||
|
||||
#### How to Use Personal Access Tokens
|
||||
### Automatic Login Method Detection
|
||||
|
||||
Choose one of the following methods:
|
||||
If no login method is explicitly provided, Prowler will automatically attempt to authenticate using environment variables in the following order of precedence:
|
||||
|
||||
**Command-line flag:**
|
||||
1. `GITHUB_PERSONAL_ACCESS_TOKEN`
|
||||
2. `GITHUB_OAUTH_APP_TOKEN`
|
||||
3. `GITHUB_APP_ID` and `GITHUB_APP_KEY` (where the key is the content of the private key file)
|
||||
|
||||
???+ note
|
||||
Ensure the corresponding environment variables are set up before running Prowler for automatic detection when not specifying the login method.
|
||||
|
||||
For more details on how to set up authentication with GitHub, see [Authentication > GitHub](./authentication.md).
|
||||
|
||||
### Personal Access Token (PAT)
|
||||
|
||||
Use this method by providing your personal access token directly.
|
||||
|
||||
```console
|
||||
prowler github --personal-access-token your_token_here
|
||||
prowler github --personal-access-token pat
|
||||
```
|
||||
|
||||
**Environment variable:**
|
||||
### OAuth App Token
|
||||
|
||||
Authenticate using an OAuth app token.
|
||||
|
||||
```console
|
||||
export GITHUB_PERSONAL_ACCESS_TOKEN="your_token_here"
|
||||
prowler github
|
||||
prowler github --oauth-app-token oauth_token
|
||||
```
|
||||
|
||||
### 2. OAuth App Token
|
||||
|
||||
OAuth Apps enable applications to act on behalf of users with explicit consent.
|
||||
|
||||
#### How to Create an OAuth App
|
||||
|
||||
1. **Navigate to Developer Settings**
|
||||
- Open GitHub Settings → Developer settings
|
||||
- Click "OAuth Apps"
|
||||
|
||||
2. **Register New Application**
|
||||
- Click "New OAuth App"
|
||||
- Complete the required fields:
|
||||
- **Application name**: Descriptive application name
|
||||
- **Homepage URL**: Application homepage
|
||||
- **Authorization callback URL**: User redirection URL after authorization
|
||||
|
||||
3. **Obtain Authorization Code**
|
||||
- Request authorization code (replace `{app_id}` with the application ID):
|
||||
```
|
||||
https://github.com/login/oauth/authorize?client_id={app_id}
|
||||
```
|
||||
|
||||
4. **Exchange Code for Token**
|
||||
- Exchange authorization code for access token (replace `{app_id}`, `{secret}`, and `{code}`):
|
||||
```
|
||||
https://github.com/login/oauth/access_token?code={code}&client_id={app_id}&client_secret={secret}
|
||||
```
|
||||
|
||||
#### How to Use OAuth Tokens
|
||||
|
||||
Choose one of the following methods:
|
||||
|
||||
**Command-line flag:**
|
||||
### GitHub App Credentials
|
||||
Use GitHub App credentials by specifying the App ID and the private key path.
|
||||
|
||||
```console
|
||||
prowler github --oauth-app-token your_oauth_token
|
||||
prowler github --github-app-id app_id --github-app-key-path app_key_path
|
||||
```
|
||||
|
||||
**Environment variable:**
|
||||
|
||||
```console
|
||||
export GITHUB_OAUTH_APP_TOKEN="your_oauth_token"
|
||||
prowler github
|
||||
```
|
||||
|
||||
### 3. GitHub App Credentials
|
||||
|
||||
GitHub Apps provide the recommended integration method for accessing multiple repositories or organizations.
|
||||
|
||||
#### How to Create a GitHub App
|
||||
|
||||
1. **Navigate to Developer Settings**
|
||||
- Open GitHub Settings → Developer settings
|
||||
- Click "GitHub Apps"
|
||||
|
||||
2. **Create New GitHub App**
|
||||
- Click "New GitHub App"
|
||||
- Complete the required fields:
|
||||
- **GitHub App name**: Unique application name
|
||||
- **Homepage URL**: Application homepage
|
||||
- **Webhook URL**: Webhook payload URL (optional)
|
||||
- **Permissions**: Application permission requirements
|
||||
|
||||
3. **Configure Permissions**
|
||||
To enable Prowler functionality, configure these permissions:
|
||||
- **Repository permissions**:
|
||||
- Contents (Read)
|
||||
- Metadata (Read)
|
||||
- Pull requests (Read)
|
||||
- **Organization permissions**:
|
||||
- Members (Read)
|
||||
- Administration (Read)
|
||||
- **Account permissions**:
|
||||
- Email addresses (Read)
|
||||
|
||||
4. **Where can this GitHub App be installed?**
|
||||
- Select "Any account" to be able to install the GitHub App in any organization.
|
||||
|
||||
5. **Generate Private Key**
|
||||
- Scroll to the "Private keys" section after app creation
|
||||
- Click "Generate a private key"
|
||||
- Download the `.pem` file and store securely
|
||||
|
||||
6. **Record App ID**
|
||||
- Locate the App ID at the top of the GitHub App settings page
|
||||
|
||||
#### How to Install the GitHub App
|
||||
|
||||
1. **Install Application**
|
||||
- Navigate to GitHub App settings
|
||||
- Click "Install App" in the left sidebar
|
||||
- Select the target account/organization
|
||||
- Choose specific repositories or select "All repositories"
|
||||
|
||||
#### How to Use GitHub App Credentials
|
||||
|
||||
Choose one of the following methods:
|
||||
|
||||
**Command-line flags:**
|
||||
|
||||
```console
|
||||
prowler github --github-app-id your_app_id --github-app-key /path/to/private-key.pem
|
||||
```
|
||||
|
||||
**Environment variables:**
|
||||
|
||||
```console
|
||||
export GITHUB_APP_ID="your_app_id"
|
||||
export GITHUB_APP_KEY="private-key-content"
|
||||
prowler github
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Security Considerations
|
||||
|
||||
Implement the following security measures:
|
||||
|
||||
- **Secure Credential Storage**: Store credentials using environment variables instead of hardcoding tokens
|
||||
- **Secrets Management**: Use dedicated secrets management systems in production environments
|
||||
- **Regular Token Rotation**: Rotate tokens and keys regularly
|
||||
- **Least Privilege Principle**: Grant only minimum required permissions
|
||||
- **Permission Auditing**: Review and audit permissions regularly
|
||||
- **Token Expiration**: Set appropriate expiration times for tokens
|
||||
- **Usage Monitoring**: Monitor token usage and revoke unused tokens
|
||||
|
||||
### Authentication Method Selection
|
||||
|
||||
Choose the appropriate method based on use case:
|
||||
|
||||
- **Personal Access Token**: Individual use, testing, or simple automation
|
||||
- **OAuth App Token**: Applications requiring user consent and delegation
|
||||
- **GitHub App**: Production integrations, especially for organizations
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
|
||||
### Insufficient Permissions
|
||||
- Verify token/app has necessary scopes/permissions
|
||||
- Check organization restrictions on third-party applications
|
||||
|
||||
### Token Expiration
|
||||
- Confirm token has not expired
|
||||
- Verify fine-grained tokens have correct resource access
|
||||
|
||||
### Rate Limiting
|
||||
- GitHub implements API call rate limits
|
||||
- Consider GitHub Apps for higher rate limits
|
||||
|
||||
### Organization Settings
|
||||
- Some organizations restrict third-party applications
|
||||
- Contact organization administrator if access is denied
|
||||
|
||||
|
After Width: | Height: | Size: 96 KiB |
|
After Width: | Height: | Size: 108 KiB |
|
After Width: | Height: | Size: 100 KiB |
|
After Width: | Height: | Size: 102 KiB |
|
After Width: | Height: | Size: 91 KiB |
|
After Width: | Height: | Size: 100 KiB |
@@ -1,10 +1,10 @@
|
||||
# Getting Started with the IaC Provider
|
||||
|
||||
Prowler's Infrastructure as Code (IaC) provider enables you to scan local or remote infrastructure code for security and compliance issues using [Trivy](https://trivy.dev/). This provider supports a wide range of IaC frameworks, allowing you to assess your code before deployment.
|
||||
Prowler's Infrastructure as Code (IaC) provider enables scanning of local or remote infrastructure code for security and compliance issues using [Trivy](https://trivy.dev/). This provider supports a wide range of IaC frameworks, allowing assessment of code before deployment.
|
||||
|
||||
## Supported Scanners
|
||||
|
||||
The IaC provider leverages Trivy to support multiple scanners, including:
|
||||
The IaC provider leverages [Trivy](https://trivy.dev/latest/docs/scanner/vulnerability/) to support multiple scanners, including:
|
||||
|
||||
- Vulnerability
|
||||
- Misconfiguration
|
||||
@@ -13,31 +13,34 @@ The IaC provider leverages Trivy to support multiple scanners, including:
|
||||
|
||||
## How It Works
|
||||
|
||||
- The IaC provider scans your local directory (or a specified path) for supported IaC files, or scan a remote repository.
|
||||
- The IaC provider scans local directories (or specified paths) for supported IaC files, or scans remote repositories.
|
||||
- No cloud credentials or authentication are required for local scans.
|
||||
- For remote repository scans, authentication can be provided via [git URL](https://git-scm.com/docs/git-clone#_git_urls), CLI flags or environment variables.
|
||||
- Check the [IaC Authentication](./authentication.md) page for more details.
|
||||
- Mutelist logic is handled by Trivy, not Prowler.
|
||||
- Results are output in the same formats as other Prowler providers (CSV, JSON, HTML, etc.).
|
||||
|
||||
## Usage
|
||||
## Prowler CLI
|
||||
|
||||
To run Prowler with the IaC provider, use the `iac` argument. You can specify the directory or repository to scan, frameworks to include, and paths to exclude.
|
||||
### Usage
|
||||
|
||||
### Scan a Local Directory (default)
|
||||
Use the `iac` argument to run Prowler with the IaC provider. Specify the directory or repository to scan, frameworks to include, and paths to exclude.
|
||||
|
||||
#### Scan a Local Directory (default)
|
||||
|
||||
```sh
|
||||
prowler iac --scan-path ./my-iac-directory
|
||||
```
|
||||
|
||||
### Scan a Remote GitHub Repository
|
||||
#### Scan a Remote GitHub Repository
|
||||
|
||||
```sh
|
||||
prowler iac --scan-repository-url https://github.com/user/repo.git
|
||||
```
|
||||
|
||||
#### Authentication for Remote Private Repositories
|
||||
##### Authentication for Remote Private Repositories
|
||||
|
||||
You can provide authentication for private repositories using one of the following methods:
|
||||
Authentication for private repositories can be provided using one of the following methods:
|
||||
|
||||
- **GitHub Username and Personal Access Token (PAT):**
|
||||
```sh
|
||||
@@ -52,12 +55,12 @@ You can provide authentication for private repositories using one of the followi
|
||||
- If not provided via CLI, the following environment variables will be used (in order of precedence):
|
||||
- `GITHUB_OAUTH_APP_TOKEN`
|
||||
- `GITHUB_USERNAME` and `GITHUB_PERSONAL_ACCESS_TOKEN`
|
||||
- If neither CLI flags nor environment variables are set, the scan will attempt to clone without authentication or using the provided in the [git URL](https://git-scm.com/docs/git-clone#_git_urls).
|
||||
- If neither CLI flags nor environment variables are set, the scan will attempt to clone without authentication or using the credentials provided in the [git URL](https://git-scm.com/docs/git-clone#_git_urls).
|
||||
|
||||
#### Mutually Exclusive Flags
|
||||
##### Mutually Exclusive Flags
|
||||
- `--scan-path` and `--scan-repository-url` are mutually exclusive. Only one can be specified at a time.
|
||||
|
||||
### Specify Scanners
|
||||
#### Specify Scanners
|
||||
|
||||
Scan only vulnerability and misconfiguration scanners:
|
||||
|
||||
@@ -65,24 +68,16 @@ Scan only vulnerability and misconfiguration scanners:
|
||||
prowler iac --scan-path ./my-iac-directory --scanners vuln misconfig
|
||||
```
|
||||
|
||||
### Exclude Paths
|
||||
#### Exclude Paths
|
||||
|
||||
```sh
|
||||
prowler iac --scan-path ./my-iac-directory --exclude-path ./my-iac-directory/test,./my-iac-directory/examples
|
||||
```
|
||||
|
||||
## Output
|
||||
### Output
|
||||
|
||||
You can use the standard Prowler output options, for example:
|
||||
Use the standard Prowler output options, for example:
|
||||
|
||||
```sh
|
||||
prowler iac --scan-path ./iac --output-formats csv json html
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- The IaC provider does not require cloud authentication for local scans.
|
||||
- For remote repository scans, authentication is optional but required for private repos.
|
||||
- CLI flags override environment variables for authentication.
|
||||
- The IaC provider is ideal for CI/CD pipelines and local development environments.
|
||||
- For more details on supported scanners, see the [Trivy documentation](https://trivy.dev/latest/docs/scanner/vulnerability/).
|
||||
|
||||
|
After Width: | Height: | Size: 145 KiB |
|
After Width: | Height: | Size: 422 KiB |
|
After Width: | Height: | Size: 584 KiB |
@@ -0,0 +1,142 @@
|
||||
# Getting Started With LLM on Prowler
|
||||
|
||||
## Overview
|
||||
|
||||
Prowler's LLM provider enables comprehensive security testing of large language models using red team techniques. It integrates with [promptfoo](https://promptfoo.dev/) to provide extensive security evaluation capabilities.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before using the LLM provider, ensure the following requirements are met:
|
||||
|
||||
- **promptfoo installed**: The LLM provider requires promptfoo to be installed on the system
|
||||
- **LLM API access**: Valid API keys for the target LLM models to test
|
||||
- **Email verification**: promptfoo requires email verification for red team evaluations
|
||||
|
||||
## Installation
|
||||
|
||||
### Install promptfoo
|
||||
|
||||
Install promptfoo using one of the following methods:
|
||||
|
||||
**Using npm:**
|
||||
```bash
|
||||
npm install -g promptfoo
|
||||
```
|
||||
|
||||
**Using Homebrew (macOS):**
|
||||
```bash
|
||||
brew install promptfoo
|
||||
```
|
||||
|
||||
**Using other package managers:**
|
||||
See the [promptfoo installation guide](https://promptfoo.dev/docs/installation/) for additional installation methods.
|
||||
|
||||
### Verify Installation
|
||||
|
||||
```bash
|
||||
promptfoo --version
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Step 1: Email Verification
|
||||
|
||||
promptfoo requires email verification for red team evaluations. Set the email address:
|
||||
|
||||
```bash
|
||||
promptfoo config set email your-email@company.com
|
||||
```
|
||||
|
||||
### Step 2: Configure LLM API Keys
|
||||
|
||||
Set up API keys for the target LLM models. For OpenAI (default configuration):
|
||||
|
||||
```bash
|
||||
export OPENAI_API_KEY="your-openai-api-key"
|
||||
```
|
||||
|
||||
For other providers, see the [promptfoo documentation](https://promptfoo.dev/docs/providers/) for specific configuration requirements.
|
||||
|
||||
### Step 3: Generate Test Cases (Optional)
|
||||
|
||||
Prowler provides a default suite of red team tests. To customize the test cases, generate them first:
|
||||
|
||||
```bash
|
||||
promptfoo redteam generate
|
||||
```
|
||||
|
||||
This creates test cases based on your configuration.
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Usage
|
||||
|
||||
Run LLM security testing with the default configuration:
|
||||
|
||||
```bash
|
||||
prowler llm
|
||||
```
|
||||
|
||||
### Custom Configuration
|
||||
|
||||
Use a custom promptfoo configuration file:
|
||||
|
||||
```bash
|
||||
prowler llm --config-path /path/to/your/config.yaml
|
||||
```
|
||||
|
||||
### Output Options
|
||||
|
||||
Generate reports in various formats:
|
||||
|
||||
```bash
|
||||
# JSON output
|
||||
prowler llm --output-format json
|
||||
|
||||
# CSV output
|
||||
prowler llm --output-format csv
|
||||
|
||||
# HTML report
|
||||
prowler llm --output-format html
|
||||
```
|
||||
|
||||
### Concurrency Control
|
||||
|
||||
Adjust the number of concurrent tests:
|
||||
|
||||
```bash
|
||||
prowler llm --max-concurrency 5
|
||||
```
|
||||
|
||||
## Default Configuration
|
||||
|
||||
Prowler includes a comprehensive default LLM configuration that provides:
|
||||
|
||||
- **Target Models**: OpenAI GPT models by default
|
||||
- **Security Frameworks**:
|
||||
- OWASP LLM Top 10
|
||||
- OWASP API Top 10
|
||||
- MITRE ATLAS
|
||||
- NIST AI Risk Management Framework
|
||||
- EU AI Act compliance
|
||||
- **Test Coverage**: Over 5,000 security test cases
|
||||
- **Plugin Support**: Multiple security testing plugins
|
||||
|
||||
## Advanced Configuration
|
||||
|
||||
### Custom Test Suites
|
||||
|
||||
Create custom test suites by modifying the promptfoo config file in `prowler/config/llm_config.yaml`, or pass a custom configuration with the `--config-path` flag:
|
||||
|
||||
```yaml
|
||||
description: Custom LLM Security Tests
|
||||
targets:
|
||||
- id: openai:gpt-4
|
||||
redteam:
|
||||
plugins:
|
||||
- id: owasp:llm
|
||||
numTests: 10
|
||||
- id: mitre:atlas
|
||||
numTests: 5
|
||||
```
|
||||
|
||||
@@ -1,20 +1,181 @@
|
||||
# Microsoft 365 Authentication for Prowler
|
||||
# Microsoft 365 Authentication in Prowler
|
||||
|
||||
Prowler for Microsoft 365 (M365) supports the following authentication methods:
|
||||
Prowler for Microsoft 365 supports multiple authentication types. Authentication methods vary between Prowler App and Prowler CLI:
|
||||
|
||||
- [**Service Principal Application**](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser#service-principal-object) (**Recommended**)
|
||||
- **Service Principal Application with Microsoft User Credentials**
|
||||
- **Stored AZ CLI credentials**
|
||||
- **Interactive browser authentication**
|
||||
**Prowler App:**
|
||||
|
||||
- [**Service Principal Application**](#service-principal-authentication-recommended) (**Recommended**)
|
||||
- [**Service Principal with User Credentials**](#service-principal-and-user-credentials-authentication) (Being deprecated)
|
||||
|
||||
**Prowler CLI:**
|
||||
|
||||
- [**Service Principal Application**](#service-principal-authentication-recommended) (**Recommended**)
|
||||
- [**Service Principal with User Credentials**](#service-principal-and-user-credentials-authentication) (Being deprecated)
|
||||
- [**Interactive browser authentication**](#interactive-browser-authentication)
|
||||
|
||||
???+ warning
|
||||
Prowler App supports the **Service Principal** authentication method and the **Service Principal with User Credentials** authentication method, but this last one will be deprecated in October once Microsoft will enforce MFA in all tenants not allowing User authentication without interactive method.
|
||||
The Service Principal with User Credentials method will be deprecated in October 2025 when Microsoft enforces MFA in all tenants, which will not allow user authentication without interactive methods.
|
||||
|
||||
### Service Principal Authentication (Recommended)
|
||||
## Required Permissions
|
||||
|
||||
**Authentication flag:** `--sp-env-auth`
|
||||
To run the full Prowler provider, including PowerShell checks, two types of permission scopes must be set in **Microsoft Entra ID**.
|
||||
|
||||
Enable Prowler authentication as the **Service Principal Application** by configuring the following environment variables:
|
||||
### Service Principal Authentication Permissions (Recommended)
|
||||
|
||||
When using service principal authentication, add these **Application Permissions**:
|
||||
|
||||
**Microsoft Graph API Permissions:**
|
||||
|
||||
- `AuditLog.Read.All`: Required for Entra service.
|
||||
- `Directory.Read.All`: Required for all services.
|
||||
- `Policy.Read.All`: Required for all services.
|
||||
- `SharePointTenantSettings.Read.All`: Required for SharePoint service.
|
||||
- `User.Read` (IMPORTANT: this must be set as **delegated**): Required for the sign-in.
|
||||
|
||||
**External API Permissions:**
|
||||
|
||||
- `Exchange.ManageAsApp` from external API `Office 365 Exchange Online`: Required for Exchange PowerShell module app authentication. The `Global Reader` role must also be assigned to the app.
|
||||
- `application_access` from external API `Skype and Teams Tenant Admin API`: Required for Teams PowerShell module app authentication.
|
||||
|
||||
???+ note
|
||||
`Directory.Read.All` can be replaced with `Domain.Read.All` for more restrictive permissions, but Entra checks related to DirectoryRoles and GetUsers will not run. If using this option, you must also add the `Organization.Read.All` permission to the service principal application for authentication.
|
||||
|
||||
???+ note
|
||||
This is the **recommended authentication method** because it allows running the full M365 provider including PowerShell checks, providing complete coverage of all available security checks.
|
||||
|
||||
### Service Principal + User Credentials Authentication Permissions
|
||||
|
||||
When using service principal with user credentials authentication, you need **both** sets of permissions:
|
||||
|
||||
**1. Service Principal Application Permissions**:
|
||||
|
||||
- All the Microsoft Graph API permissions listed above are required.
|
||||
- External API permissions listed above are **not needed**.
|
||||
|
||||
**2. User-Level Permissions**: These are set at the `M365_USER` level, so the user used to run Prowler must have one of the following roles:
|
||||
|
||||
- `Global Reader` (recommended): Allows reading all required information.
|
||||
- `Exchange Administrator` and `Teams Administrator`: User needs both roles for the same access as Global Reader.
|
||||
|
||||
### Browser Authentication Permissions
|
||||
|
||||
When using browser authentication, permissions are delegated to the user, so the user must have the appropriate permissions rather than the application.
|
||||
|
||||
???+ warning
|
||||
With browser authentication, you will only be able to run checks that work through MS Graph API. PowerShell module checks will not be executed.
|
||||
|
||||
### Step-by-Step Permission Assignment
|
||||
|
||||
#### Create Service Principal Application
|
||||
|
||||
1. Access **Microsoft Entra ID**
|
||||
|
||||

|
||||
|
||||
2. Navigate to "Applications" > "App registrations"
|
||||
|
||||

|
||||
|
||||
3. Click "+ New registration", complete the form, and click "Register"
|
||||
|
||||

|
||||
|
||||
4. Go to "Certificates & secrets" > "Client secrets" > "+ New client secret"
|
||||
|
||||

|
||||
|
||||
5. Fill in the required fields and click "Add", then copy the generated value (this will be `AZURE_CLIENT_SECRET`)
|
||||
|
||||

|
||||
|
||||
#### Grant Microsoft Graph API Permissions
|
||||
|
||||
1. Go to App Registration > Select your Prowler App > click on "API permissions"
|
||||
|
||||

|
||||
|
||||
2. Click "+ Add a permission" > "Microsoft Graph" > "Application permissions"
|
||||
|
||||

|
||||
|
||||
3. Search and select the required permissions:
|
||||
- `AuditLog.Read.All`: Required for Entra service
|
||||
- `Directory.Read.All`: Required for all services
|
||||
- `Policy.Read.All`: Required for all services
|
||||
- `SharePointTenantSettings.Read.All`: Required for SharePoint service
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
4. Click "Add permissions", then click "Grant admin consent for <your-tenant-name>"
|
||||
|
||||
#### Grant PowerShell Module Permissions (For Service Principal Authentication)
|
||||
|
||||
1. **Add Exchange API:**
|
||||
|
||||
- Search and select "Office 365 Exchange Online" API in **APIs my organization uses**
|
||||
|
||||

|
||||
|
||||
- Select "Exchange.ManageAsApp" permission and click "Add permissions"
|
||||
|
||||

|
||||
|
||||
- Assign `Global Reader` role to the app: Go to `Roles and administrators` > click `here` for directory level assignment
|
||||
|
||||

|
||||
|
||||
- Search for `Global Reader` and assign it to your application
|
||||
|
||||

|
||||
|
||||
2. **Add Teams API:**
|
||||
|
||||
- Search and select "Skype and Teams Tenant Admin API" in **APIs my organization uses**
|
||||
|
||||

|
||||
|
||||
- Select "application_access" permission and click "Add permissions"
|
||||
|
||||

|
||||
|
||||
3. Click "Grant admin consent for <your-tenant-name>" to grant admin consent
|
||||
|
||||

|
||||
|
||||
#### Assign User Roles (For User Authentication)
|
||||
|
||||
When using Service Principal with User Credentials authentication, assign the following roles to the user:
|
||||
|
||||
1. Go to Users > All Users > Click on the email for the user
|
||||
|
||||

|
||||
|
||||
2. Click "Assigned Roles"
|
||||
|
||||

|
||||
|
||||
3. Click "Add assignments", then search and select:
|
||||
|
||||
- `Global Reader` (recommended)
|
||||
- OR `Exchange Administrator` and `Teams Administrator` (both required)
|
||||
|
||||

|
||||
|
||||
4. Click next, assign the role as "Active", and click "Assign"
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Service Principal Authentication (Recommended)
|
||||
|
||||
*Available for both Prowler App and Prowler CLI*
|
||||
|
||||
**Authentication flag for CLI:** `--sp-env-auth`
|
||||
|
||||
Authenticate using the **Service Principal Application** by configuring the following environment variables:
|
||||
|
||||
```console
|
||||
export AZURE_CLIENT_ID="XXXXXXXXX"
|
||||
@@ -24,21 +185,23 @@ export AZURE_TENANT_ID="XXXXXXXXX"
|
||||
|
||||
If these variables are not set or exported, execution using `--sp-env-auth` will fail.
|
||||
|
||||
Refer to the [Create Prowler Service Principal](getting-started-m365.md#create-the-service-principal-app) guide for setup instructions.
|
||||
Refer to the [Step-by-Step Permission Assignment](#step-by-step-permission-assignment) section below for setup instructions.
|
||||
|
||||
If the external API permissions described in the section above are not added, only checks that work through MS Graph will be executed. This means that the full provider will not be executed.
|
||||
|
||||
???+ note
|
||||
In order to scan all the checks from M365 required permissions to the service principal application must be added. Refer to the [External API Permissions Assignment](getting-started-m365.md#grant-powershell-modules-permissions) section for more information.
|
||||
To scan all M365 checks, the required permissions must be added to the service principal application. Refer to the [PowerShell Module Permissions](#grant-powershell-module-permissions-for-service-principal-authentication) section for more information.
|
||||
|
||||
### Service Principal and User Credentials Authentication
|
||||
## Service Principal and User Credentials Authentication
|
||||
|
||||
Authentication flag: `--env-auth`
|
||||
*Available for both Prowler App and Prowler CLI*
|
||||
|
||||
**Authentication flag for CLI:** `--env-auth`
|
||||
|
||||
???+ warning
|
||||
This method is not recommended anymore, we recommend just use the **Service Principal Application** authentication method instead.
|
||||
This method is not recommended and will be deprecated in October 2025. Use the **Service Principal Application** authentication method instead.
|
||||
|
||||
This method builds upon the Service Principal authentication by adding User Credentials. Configure the following environment variables: `M365_USER` and `M365_PASSWORD`.
|
||||
This method builds upon Service Principal authentication by adding User Credentials. Configure the following environment variables: `M365_USER` and `M365_PASSWORD`.
|
||||
|
||||
```console
|
||||
export AZURE_CLIENT_ID="XXXXXXXXX"
|
||||
@@ -72,74 +235,19 @@ These two new environment variables are **required** in this authentication meth
|
||||
|
||||
|
||||
|
||||
### Interactive Browser Authentication
|
||||
## Interactive Browser Authentication
|
||||
|
||||
*Available only for Prowler CLI*
|
||||
|
||||
**Authentication flag:** `--browser-auth`
|
||||
|
||||
This authentication method requires authentication against Azure using the default browser to start the scan. The `--tenant-id` flag is also required.
|
||||
Authenticate against Azure using the default browser to start the scan. The `--tenant-id` flag is also required.
|
||||
|
||||
These credentials only enable checks that rely on Microsoft Graph. The entire provider cannot be run with this method. To perform a full M365 security scan, use the **recommended authentication method**.
|
||||
|
||||
Since this is a **delegated permission** authentication method, necessary permissions should be assigned to the user rather than the application.
|
||||
|
||||
### Required Permissions
|
||||
|
||||
To run the full Prowler provider, including PowerShell checks, two types of permission scopes must be set in **Microsoft Entra ID**.
|
||||
|
||||
#### Service Principal Authentication (`--sp-env-auth`) - Recommended
|
||||
|
||||
When using service principal authentication, add the following **Application Permissions**:
|
||||
|
||||
**Microsoft Graph API Permissions:**
|
||||
|
||||
- `AuditLog.Read.All`: Required for Entra service.
|
||||
- `Directory.Read.All`: Required for all services.
|
||||
- `Policy.Read.All`: Required for all services.
|
||||
- `SharePointTenantSettings.Read.All`: Required for SharePoint service.
|
||||
- `User.Read` (IMPORTANT: this must be set as **delegated**): Required for the sign-in.
|
||||
|
||||
**External API Permissions:**
|
||||
|
||||
- `Exchange.ManageAsApp` from external API `Office 365 Exchange Online`: Required for Exchange PowerShell module app authentication. You also need to assign the `Global Reader` role to the app.
|
||||
- `application_access` from external API `Skype and Teams Tenant Admin API`: Required for Teams PowerShell module app authentication.
|
||||
|
||||
???+ note
|
||||
`Directory.Read.All` can be replaced with `Domain.Read.All`, which is a more restrictive permission, but then the Entra checks related to DirectoryRoles and GetUsers cannot be run.
|
||||
|
||||
> If you do this you will need to add also the `Organization.Read.All` permission to the service principal application in order to authenticate.
|
||||
|
||||
???+ note
|
||||
This is the **recommended authentication method** because it allows you to run the full M365 provider including PowerShell checks, providing complete coverage of all available security checks, same as the Service Principal Authentication + User Credentials Authentication but this last one will be deprecated in October once Microsoft will enforce MFA in all tenants not allowing User authentication without interactive method.
|
||||
|
||||
|
||||
#### Service Principal + User Credentials Authentication (`--env-auth`)
|
||||
|
||||
When using service principal with user credentials authentication, you need **both** sets of permissions:
|
||||
|
||||
**1. Service Principal Application Permissions**:
|
||||
- You **will need** all the Microsoft Graph API permissions listed above.
|
||||
- You **won't need** the External API permissions listed above.
|
||||
|
||||
**2. User-Level Permissions**: These are set at the `M365_USER` level, so the user used to run Prowler must have one of the following roles:
|
||||
|
||||
- `Global Reader` (recommended): this allows you to read all roles needed.
|
||||
- `Exchange Administrator` and `Teams Administrator`: the user needs both roles, but with these [roles](https://learn.microsoft.com/en-us/exchange/permissions-exo/permissions-exo#microsoft-365-permissions-in-exchange-online) the same information as a Global Reader can be accessed (since only read access is needed, Global Reader is recommended).
|
||||
|
||||
|
||||
#### Browser Authentication (`--browser-auth`)
|
||||
|
||||
When using browser authentication, permissions are delegated to the user, so the user must have the appropriate permissions rather than the application.
|
||||
|
||||
???+ warning
|
||||
With browser authentication, you will only be able to run checks that work through MS Graph API. PowerShell module checks will not be executed.
|
||||
|
||||
### Assigning Permissions and Roles
|
||||
|
||||
For guidance on assigning the necessary permissions and roles, follow these instructions:
|
||||
- [Grant API Permissions](getting-started-m365.md#grant-required-graph-api-permissions)
|
||||
- [Assign Required Roles](getting-started-m365.md#if-using-user-authentication)
|
||||
|
||||
### Supported PowerShell Versions
|
||||
## Supported PowerShell Versions
|
||||
|
||||
PowerShell is required to run certain M365 checks.
|
||||
|
||||
@@ -156,26 +264,32 @@ PowerShell is required to run certain M365 checks.
|
||||
|
||||
### Installing PowerShell
|
||||
|
||||
Installing PowerShell is different depending on your OS.
|
||||
Installing PowerShell is different depending on your OS:
|
||||
|
||||
- [Windows](https://learn.microsoft.com/es-es/powershell/scripting/install/installing-powershell-on-windows?view=powershell-7.5#install-powershell-using-winget-recommended): you will need to update PowerShell to +7.4 to be able to run prowler, if not some checks will not show findings and the provider could not work as expected. This version of PowerShell is [supported](https://learn.microsoft.com/es-es/powershell/scripting/install/installing-powershell-on-windows?view=powershell-7.4#supported-versions-of-windows) on Windows 10, Windows 11, Windows Server 2016 and higher versions.
|
||||
=== "Windows"
|
||||
|
||||
```console
|
||||
winget install --id Microsoft.PowerShell --source winget
|
||||
```
|
||||
[Windows](https://learn.microsoft.com/es-es/powershell/scripting/install/installing-powershell-on-windows?view=powershell-7.5#install-powershell-using-winget-recommended): PowerShell must be updated to version 7.4+ for Prowler to function properly. Otherwise, some checks will not show findings and the provider may not work as expected. This version of PowerShell is [supported](https://learn.microsoft.com/es-es/powershell/scripting/install/installing-powershell-on-windows?view=powershell-7.4#supported-versions-of-windows) on Windows 10, Windows 11, Windows Server 2016 and higher versions.
|
||||
|
||||
```console
|
||||
winget install --id Microsoft.PowerShell --source winget
|
||||
```
|
||||
|
||||
- [MacOS](https://learn.microsoft.com/es-es/powershell/scripting/install/installing-powershell-on-macos?view=powershell-7.5#install-the-latest-stable-release-of-powershell): installing PowerShell on MacOS needs to have installed [brew](https://brew.sh/), once you have it is just running the command above, Pwsh is only supported in macOS 15 (Sequoia) x64 and Arm64, macOS 14 (Sonoma) x64 and Arm64, macOS 13 (Ventura) x64 and Arm64
|
||||
=== "MacOS"
|
||||
|
||||
```console
|
||||
brew install powershell/tap/powershell
|
||||
```
|
||||
[MacOS](https://learn.microsoft.com/es-es/powershell/scripting/install/installing-powershell-on-macos?view=powershell-7.5#install-the-latest-stable-release-of-powershell): installing PowerShell on macOS requires [brew](https://brew.sh/). Once brew is installed, simply run the command shown below. Pwsh is only supported on macOS 15 (Sequoia) x64 and Arm64, macOS 14 (Sonoma) x64 and Arm64, and macOS 13 (Ventura) x64 and Arm64.
|
||||
|
||||
Once it's installed run `pwsh` on your terminal to verify it's working.
|
||||
```console
|
||||
brew install powershell/tap/powershell
|
||||
```
|
||||
|
||||
- Linux: installing PowerShell on Linux depends on the distro you are using:
|
||||
Once it's installed run `pwsh` on your terminal to verify it's working.
|
||||
|
||||
- [Ubuntu](https://learn.microsoft.com/es-es/powershell/scripting/install/install-ubuntu?view=powershell-7.5#installation-via-package-repository-the-package-repository): The required version for installing PowerShell +7.4 on Ubuntu are Ubuntu 22.04 and Ubuntu 24.04. The recommended way to install it is downloading the package available on PMC. You just need to follow the following steps:
|
||||
=== "Linux (Ubuntu)"
|
||||
|
||||
[Ubuntu](https://learn.microsoft.com/es-es/powershell/scripting/install/install-ubuntu?view=powershell-7.5#installation-via-package-repository-the-package-repository): The required versions for installing PowerShell 7.4+ on Ubuntu are Ubuntu 22.04 and Ubuntu 24.04.
|
||||
The recommended way to install it is downloading the package available on PMC.
|
||||
|
||||
Follow these steps:
|
||||
|
||||
```console
|
||||
###################################
|
||||
@@ -210,7 +324,11 @@ Once it's installed run `pwsh` on your terminal to verify it's working.
|
||||
pwsh
|
||||
```
|
||||
|
||||
- [Alpine](https://learn.microsoft.com/es-es/powershell/scripting/install/install-alpine?view=powershell-7.5#installation-steps): The only supported version for installing PowerShell +7.4 on Alpine is Alpine 3.20. The unique way to install it is downloading the tar.gz package available on [PowerShell github](https://github.com/PowerShell/PowerShell/releases/download/v7.5.0/powershell-7.5.0-linux-musl-x64.tar.gz). You just need to follow the following steps:
|
||||
=== "Linux (Alpine)"
|
||||
|
||||
[Alpine](https://learn.microsoft.com/es-es/powershell/scripting/install/install-alpine?view=powershell-7.5#installation-steps): The only supported version for installing PowerShell 7.4+ on Alpine is Alpine 3.20. The only way to install it is by downloading the tar.gz package available on [PowerShell github](https://github.com/PowerShell/PowerShell/releases/download/v7.5.0/powershell-7.5.0-linux-musl-x64.tar.gz).
|
||||
|
||||
Follow these steps:
|
||||
|
||||
```console
|
||||
# Install the requirements
|
||||
@@ -252,7 +370,11 @@ Once it's installed run `pwsh` on your terminal to verify it's working.
|
||||
pwsh
|
||||
```
|
||||
|
||||
- [Debian](https://learn.microsoft.com/es-es/powershell/scripting/install/install-debian?view=powershell-7.5#installation-on-debian-11-or-12-via-the-package-repository): The required version for installing PowerShell +7.4 on Debian are Debian 11 and Debian 12. The recommended way to install it is downloading the package available on PMC. You just need to follow the following steps:
|
||||
=== "Linux (Debian)"
|
||||
|
||||
[Debian](https://learn.microsoft.com/es-es/powershell/scripting/install/install-debian?view=powershell-7.5#installation-on-debian-11-or-12-via-the-package-repository): The required versions for installing PowerShell 7.4+ on Debian are Debian 11 and Debian 12. The recommended way to install it is by downloading the package available on PMC.
|
||||
|
||||
Follow these steps:
|
||||
|
||||
```console
|
||||
###################################
|
||||
@@ -287,7 +409,12 @@ Once it's installed run `pwsh` on your terminal to verify it's working.
|
||||
pwsh
|
||||
```
|
||||
|
||||
- [Rhel](https://learn.microsoft.com/es-es/powershell/scripting/install/install-rhel?view=powershell-7.5#installation-via-the-package-repository): The required version for installing PowerShell +7.4 on Red Hat are RHEL 8 and RHEL 9. The recommended way to install it is downloading the package available on PMC. You just need to follow the following steps:
|
||||
|
||||
=== "Linux (RHEL)"
|
||||
|
||||
[RHEL](https://learn.microsoft.com/es-es/powershell/scripting/install/install-rhel?view=powershell-7.5#installation-via-the-package-repository): The required versions for installing PowerShell 7.4+ on Red Hat are RHEL 8 and RHEL 9. The recommended way to install it is by downloading the package available on PMC.
|
||||
|
||||
Follow these steps:
|
||||
|
||||
```console
|
||||
###################################
|
||||
@@ -317,7 +444,9 @@ Once it's installed run `pwsh` on your terminal to verify it's working.
|
||||
sudo dnf install powershell -y
|
||||
```
|
||||
|
||||
- [Docker](https://learn.microsoft.com/es-es/powershell/scripting/install/powershell-in-docker?view=powershell-7.5#use-powershell-in-a-container): The following command download the latest stable versions of PowerShell:
|
||||
=== "Docker"
|
||||
|
||||
[Docker](https://learn.microsoft.com/es-es/powershell/scripting/install/powershell-in-docker?view=powershell-7.5#use-powershell-in-a-container): The following command downloads the latest stable version of PowerShell:
|
||||
|
||||
```console
|
||||
docker pull mcr.microsoft.com/dotnet/sdk:9.0
|
||||
@@ -329,6 +458,7 @@ Once it's installed run `pwsh` on your terminal to verify it's working.
|
||||
docker run -it mcr.microsoft.com/dotnet/sdk:9.0 pwsh
|
||||
```
|
||||
|
||||
|
||||
### Required PowerShell Modules
|
||||
|
||||
Prowler relies on several PowerShell cmdlets to retrieve necessary data.
|
||||
|
||||
@@ -1,275 +1,105 @@
|
||||
# Getting Started with M365 on Prowler Cloud/App
|
||||
|
||||
Set up your M365 account to enable security scanning using Prowler Cloud/App.
|
||||
# Getting Started With Microsoft 365 on Prowler
|
||||
|
||||
???+ note "Government Cloud Support"
|
||||
Government cloud accounts or tenants (Microsoft 365 Government) are not currently supported, but we expect to add support for them in the near future.
|
||||
Government cloud accounts or tenants (Microsoft 365 Government) are currently unsupported, but we expect to add support for them in the near future.
|
||||
|
||||
## Requirements
|
||||
## Prerequisites
|
||||
|
||||
To configure your M365 account, you'll need:
|
||||
Configure authentication for Microsoft 365 by following the [Microsoft 365 Authentication](authentication.md) guide. This includes:
|
||||
|
||||
1. Obtain a domain from the Entra ID portal.
|
||||
- Creating a Service Principal Application
|
||||
- Granting required Microsoft Graph API permissions
|
||||
- Setting up PowerShell module permissions (for full security coverage)
|
||||
- Assigning appropriate roles to users (if using user authentication)
|
||||
|
||||
2. Access Prowler Cloud/App and add a new cloud provider `Microsoft 365`.
|
||||
## Prowler App
|
||||
|
||||
3. Configure your M365 account:
|
||||
### Step 1: Obtain Domain ID
|
||||
|
||||
3.1 Create the Service Principal app.
|
||||
1. Go to the Entra ID portal, then search for "Domain" or go to Identity > Settings > Domain Names
|
||||
|
||||
3.2 Grant the required API permissions.
|
||||

|
||||
|
||||
3.3 Assign the required roles to your user.
|
||||

|
||||
|
||||
4. Add the credentials to Prowler Cloud/App.
|
||||
2. Select the domain to use as unique identifier for the Microsoft 365 account in Prowler App
|
||||
|
||||
## Step 1: Obtain your Domain
|
||||
|
||||
Go to the Entra ID portal, then you can search for `Domain` or go to Identity > Settings > Domain Names.
|
||||
|
||||

|
||||
|
||||
<br>
|
||||
|
||||

|
||||
|
||||
Once you are there just select the domain you want to use as unique identifier for your M365 account in Prowler Cloud/App.
|
||||
|
||||
---
|
||||
|
||||
## Step 2: Access Prowler Cloud/App
|
||||
### Step 2: Access Prowler App
|
||||
|
||||
1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](../prowler-app.md)
|
||||
2. Navigate to `Configuration` > `Cloud Providers`
|
||||
2. Navigate to "Configuration" > "Cloud Providers"
|
||||
|
||||

|
||||
|
||||
3. Click on `Add Cloud Provider`
|
||||
3. Click on "Add Cloud Provider"
|
||||
|
||||

|
||||
|
||||
4. Select `Microsoft 365`
|
||||
4. Select "Microsoft 365"
|
||||
|
||||

|
||||
|
||||
5. Add the Domain ID and an optional alias, then click `Next`
|
||||
5. Add the Domain ID and an optional alias, then click "Next"
|
||||
|
||||

|
||||
|
||||
---
|
||||
### Step 3: Add Credentials to Prowler App
|
||||
|
||||
## Step 3: Configure your M365 account
|
||||
|
||||
|
||||
### Create the Service Principal app
|
||||
|
||||
A Service Principal is required to grant Prowler the necessary privileges.
|
||||
|
||||
1. Access **Microsoft Entra ID**
|
||||
|
||||

|
||||
|
||||
2. Navigate to `Applications` > `App registrations`
|
||||
|
||||

|
||||
|
||||
3. Click `+ New registration`, complete the form, and click `Register`
|
||||
|
||||

|
||||
|
||||
4. Go to `Certificates & secrets` > `Client secrets` > `+ New client secret`
|
||||
|
||||

|
||||
|
||||
5. Fill in the required fields and click `Add`, then copy the generated `value` (that value will be `AZURE_CLIENT_SECRET`)
|
||||
|
||||

|
||||
|
||||
With this done, you will have all the needed keys, summarized in the following table:
|
||||
|
||||
| Value | Description |
|
||||
|-------|-------------|
|
||||
| Client ID | Application (client) ID |
|
||||
| Client Secret | AZURE_CLIENT_SECRET |
|
||||
| Tenant ID | Directory (tenant) ID |
|
||||
|
||||
---
|
||||
|
||||
### Grant required Graph API permissions
|
||||
|
||||
Assign the following Microsoft Graph permissions:
|
||||
|
||||
- `AuditLog.Read.All`: Required for Entra service.
|
||||
- `Directory.Read.All`: Required for all services.
|
||||
- `Policy.Read.All`: Required for all services.
|
||||
- `SharePointTenantSettings.Read.All`: Required for SharePoint service.
|
||||
- `User.Read` (IMPORTANT: this is set as **delegated**): Required for the sign-in only if using user authentication.
|
||||
|
||||
???+ note
|
||||
You can replace `Directory.Read.All` with `Domain.Read.All`, which is a more restrictive permission, but then the Entra checks related to DirectoryRoles and GetUsers cannot be run.
|
||||
|
||||
> If you do this you will need to add also the `Organization.Read.All` permission to the service principal application in order to authenticate.
|
||||
|
||||
Follow these steps to assign the permissions:
|
||||
|
||||
1. Go to your App Registration > Select your Prowler App created before > click on `API permissions`
|
||||
|
||||

|
||||
|
||||
2. Click `+ Add a permission` > `Microsoft Graph` > `Application permissions`
|
||||
|
||||

|
||||
|
||||
3. Search and select every permission below and once all are selected click on `Add permissions`:
|
||||
- `AuditLog.Read.All`: Required for Entra service.
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `SharePointTenantSettings.Read.All`
|
||||
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
|
||||
### Grant PowerShell modules permissions
|
||||
|
||||
The permissions you need to grant depends on whether you are using user credentials or service principal to authenticate to the M365 modules.
|
||||
|
||||
???+ warning "Warning"
|
||||
Make sure you add the correct set of permissions for the authentication method you are using.
|
||||
|
||||
|
||||
#### If using application(service principal) authentication (Recommended)
|
||||
|
||||
To grant the permissions for the PowerShell modules via application authentication, you need to add the necessary APIs to your app registration. All of these assignments are done through Entra ID.
|
||||
|
||||
???+ warning "Warning"
|
||||
You need to have a license that allows you to use the APIs.
|
||||
|
||||
1. Add Exchange API:
|
||||
|
||||
    - Search and select `Office 365 Exchange Online` API in **APIs my organization uses**.
|
||||
|
||||

|
||||
|
||||
- Select `Exchange.ManageAsApp` permission and click on `Add permissions`.
|
||||
|
||||

|
||||
|
||||
You also need to assign the `Global Reader` role to the app. For that go to `Roles and administrators` and in the `Administrative roles` section click `here` to go to the directory level assignment:
|
||||
|
||||

|
||||
|
||||
    Once in the directory level assignment, search for `Global Reader` and click on it to open the assignments page of that role.
|
||||
|
||||

|
||||
|
||||
Click on `Add assignments`, search for your app and click on `Assign`.
|
||||
|
||||
You have to select it as `Active` and click on `Assign` to assign the role to the app.
|
||||
|
||||

|
||||
|
||||
    For more information about the need to add this role, see [Microsoft documentation](https://learn.microsoft.com/en-us/powershell/exchange/app-only-auth-powershell-v2?view=exchange-ps#step-5-assign-microsoft-entra-roles-to-the-application). You can select any other of the specified roles.
|
||||
|
||||
2. Add Teams API:
|
||||
|
||||
    - Search and select the `Skype and Teams Tenant Admin API` in **APIs my organization uses**.
|
||||
|
||||

|
||||
|
||||
- Select `application_access` permission and click on `Add permissions`.
|
||||
|
||||

|
||||
|
||||
3. Click on `Grant admin consent for <your-tenant-name>` to grant admin consent.
|
||||
|
||||

|
||||
|
||||
The final result of permission assignment should be this:
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
#### If using user authentication
|
||||
|
||||
This method is not recommended because it requires a user with MFA enabled and Microsoft will not allow MFA capable users to authenticate programmatically after 1st October 2025. See [Microsoft documentation](https://learn.microsoft.com/en-us/entra/identity/authentication/concept-mandatory-multifactor-authentication?tabs=dotnet) for more information.
|
||||
|
||||
???+ warning
|
||||
Remember that if the user is newly created, you need to sign in with that account first, as Microsoft will prompt you to change the password. If you don’t complete this step, user authentication will fail because Microsoft marks the initial password as expired.
|
||||
|
||||
|
||||
1. Search and select:
|
||||
|
||||
- `User.Read`
|
||||
|
||||

|
||||
|
||||
2. Click `Add permissions`, then **grant admin consent**
|
||||
|
||||

|
||||
|
||||
The final result of permission assignment should be this:
|
||||
|
||||

|
||||
|
||||
3. Assign **required roles** to your **user**
|
||||
|
||||
Assign one of the following roles to your User:
|
||||
|
||||
- `Global Reader` (recommended): this allows you to read all roles needed.
|
||||
- `Exchange Administrator` and `Teams Administrator`: the user needs both roles, but with these [roles](https://learn.microsoft.com/en-us/exchange/permissions-exo/permissions-exo#microsoft-365-permissions-in-exchange-online) you can access the same information as a Global Reader (here you only read, which is why we recommend that role).
|
||||
|
||||
Follow these steps to assign the role:
|
||||
|
||||
1. Go to Users > All Users > Click on the email for the user you will use
|
||||
|
||||

|
||||
|
||||
2. Click `Assigned Roles`
|
||||
|
||||

|
||||
|
||||
3. Click on `Add assignments`, then search and select:
|
||||
|
||||
    - `Global Reader`: this is the recommended role; if you want to use the others, just search for them
|
||||
|
||||

|
||||
|
||||
4. Click on next, then assign the role as `Active`, and click on `Assign` to grant admin consent
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Step 4: Add credentials to Prowler Cloud/App
|
||||
|
||||
1. Go to your App Registration overview and copy the `Client ID` and `Tenant ID`
|
||||
1. Go to App Registration overview and copy the Client ID and Tenant ID
|
||||
|
||||

|
||||
|
||||
2. Go to Prowler App and paste:
|
||||
|
||||
2. Go to Prowler Cloud/App and paste:
|
||||
- Client ID
|
||||
- Tenant ID
|
||||
- `AZURE_CLIENT_SECRET` from the Service Principal setup
|
||||
|
||||
- `Client ID`
|
||||
- `Tenant ID`
|
||||
- `AZURE_CLIENT_SECRET` from earlier
|
||||
If using user authentication, also add:
|
||||
|
||||
If you are using user authentication, also add:
|
||||
|
||||
- `M365_USER` the user using the correct assigned domain, more info [here](../../tutorials/microsoft365/authentication.md#service-principal-and-user-credentials-authentication)
|
||||
- `M365_PASSWORD` the password of the user
|
||||
- `M365_USER` (email using the assigned domain in tenant)
|
||||
- `M365_PASSWORD` (user password)
|
||||
|
||||

|
||||
|
||||
3. Click `Next`
|
||||
3. Click "Next"
|
||||
|
||||

|
||||
|
||||
4. Click `Launch Scan`
|
||||
4. Click "Launch Scan"
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
Use Prowler CLI to scan Microsoft 365 environments.
|
||||
|
||||
### PowerShell Requirements
|
||||
|
||||
PowerShell 7.4+ is required for comprehensive Microsoft 365 security coverage. Installation instructions are available in the [Authentication guide](authentication.md#supported-powershell-versions).
|
||||
|
||||
### Authentication Options
|
||||
|
||||
Select an authentication method from the [Microsoft 365 Authentication](authentication.md) guide:
|
||||
|
||||
- **Service Principal Application** (recommended): `--sp-env-auth`
|
||||
- **Service Principal with User Credentials**: `--env-auth`
|
||||
- **Interactive Browser Authentication**: `--browser-auth`
|
||||
|
||||
### Basic Usage
|
||||
|
||||
After configuring authentication, run a basic scan:
|
||||
|
||||
```console
|
||||
prowler m365 --sp-env-auth
|
||||
```
|
||||
|
||||
For comprehensive scans including PowerShell checks:
|
||||
|
||||
```console
|
||||
prowler m365 --sp-env-auth --init-modules
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -2,44 +2,49 @@
|
||||
|
||||
MongoDB Atlas provider uses [HTTP Digest Authentication with API key pairs consisting of a public key and private key](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-service).
|
||||
|
||||
## Authentication Methods
|
||||
|
||||
### Command-Line Arguments
|
||||
## Required Permissions
|
||||
|
||||
```bash
|
||||
prowler mongodbatlas --atlas-public-key <public_key> --atlas-private-key <private_key>
|
||||
```
|
||||
MongoDB Atlas API keys require appropriate permissions to perform security checks:
|
||||
|
||||
### Environment Variables
|
||||
- **Organization Read Only**: Provides read-only access to everything in the organization, including all projects in the organization.
|
||||
- To [audit the Auditing configuration for the project](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/group/endpoint-auditing), **Organization Owner** permission is required.
|
||||
|
||||
```bash
|
||||
export ATLAS_PUBLIC_KEY=<public_key>
|
||||
export ATLAS_PRIVATE_KEY=<private_key>
|
||||
prowler mongodbatlas
|
||||
```
|
||||
The IP address where Prowler runs must be added to the IP Access List of the MongoDB Atlas organization API key. To skip this step and use the API key across all IP address types, uncheck the "Require IP Access List for the Atlas Administration API" button in Organization Settings. This setting is [enabled by default](https://www.mongodb.com/docs/atlas/configure-api-access/#optional--require-an-ip-access-list-for-the-atlas-administration-api).
|
||||
|
||||
## Creating API Keys
|
||||
???+ warning
|
||||
To ensure the check `organizations_api_access_list_required` passes, enable the API access list for the organization and add the execution IP to the organization's IP Access List. When running checks from Prowler Cloud, add our IP to the IP Access List.
|
||||
|
||||
### Step-by-Step Guide
|
||||

|
||||
|
||||
1. **Log into MongoDB Atlas**
|
||||
- Access the MongoDB Atlas console
|
||||
|
||||
2. **Navigate to Access Manager**
|
||||
- Go to the organization or project access management section
|
||||
## API Key
|
||||
|
||||
3. **Select API Keys Tab**
|
||||
- Click on the "API Keys" tab
|
||||
1. **Log into MongoDB Atlas**: Access the MongoDB Atlas console
|
||||
2. **Navigate to Access Manager**: Go to the organization access management section:
|
||||
|
||||
4. **Create API Key**
|
||||
- Click "Create API Key"
|
||||
- Provide a description for the key
|
||||
- Click "Access Manager" and "Organization Access":
|
||||
|
||||
5. **Set Permissions**
|
||||
- Grant minimum required permissions
|
||||

|
||||
|
||||
6. **Save Credentials**
|
||||
- Note the public key and private key
|
||||
- Store credentials securely
|
||||
- Then click the "Applications" tab inside the Access Manager:
|
||||
|
||||
For more details about MongoDB Atlas, see the [MongoDB Atlas Tutorial](../tutorials/mongodbatlas/getting-started-mongodbatlas.md).
|
||||

|
||||
|
||||
3. **Select API Keys Tab**: Click the "API Keys" tab that appears in the image above
|
||||
|
||||
4. **Create API Key**: Click "Create API Key" and provide a description
|
||||
|
||||

|
||||
|
||||
5. **Set Permissions**: Recommend project permissions for enhanced security; modify them after creating the key
|
||||
|
||||

|
||||
|
||||
6. **Save Credentials**: Record both the public and private keys, then store them securely
|
||||
|
||||

|
||||
|
||||
7. **Add IP Access List**: Add the IP address where Prowler runs to the API Key's IP Access List. To skip this step and use the API key for all IP addresses, uncheck the "Require IP Access List for the Atlas Administration API" button in [Organization Settings](#required-permissions), though this is not recommended.
|
||||
|
||||

|
||||
|
||||