Compare commits

...

41 Commits

Author SHA1 Message Date
MrCloudSec
14a8ab3525 fix: delete comments 2025-07-29 15:42:23 +08:00
MrCloudSec
15400abbbd chore: add empty validation 2025-07-24 19:03:37 +08:00
Alejandro Bailo
cf48ae5234 Merge branch 'master' into PRWLR-7606-add-github-provider-to-ui 2025-07-24 10:39:35 +02:00
alejandrobailo
2e6c8ca892 chore: CHANGELOG.md updated 2025-07-24 10:35:23 +02:00
alejandrobailo
9a52c5125d feat: code clening and refactored 2025-07-24 10:34:31 +02:00
Andoni Alonso
95791a9909 chore(aws): replace known errors with warnings (#8347)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-07-24 15:34:45 +08:00
sumit-tft
ad0b8a4208 feat(ui): create CustomLink component and refactor links to use it (#8341)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2025-07-23 19:10:51 +02:00
MrCloudSec
a5f8f5ea60 fix: github app id format 2025-07-23 23:32:11 +08:00
Cole Murray
5669a42039 fix(wazuh): patch command injection vulnerability in prowler-wrapper.py (#8331)
Co-authored-by: Test User <test@example.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2025-07-23 16:06:55 +02:00
Kay Agahd
83b328ea92 fix(aws): avoid false positives in SQS encryption check for ephemeral queues (#8330)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-07-23 21:03:02 +08:00
MrCloudSec
e08b003605 fix: add back button to forms 2025-07-23 20:26:26 +08:00
Alejandro Bailo
a6c88c0d9e test: timeout updated for E2E (#8351) 2025-07-23 13:11:32 +02:00
Sergio Garcia
922f9d2f91 docs(gcp): update GCP permissions (#8350) 2025-07-23 17:43:42 +08:00
Rubén De la Torre Vico
a69d0d16c0 fix(azure/storage): handle when Azure API set values to None (#8325)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-07-23 17:11:04 +08:00
Alejandro Bailo
676cc44fe2 feat: env keys behavior updated (#8348) 2025-07-23 10:44:28 +02:00
MrCloudSec
556a7b84a5 Merge branch 'master' into PRWLR-7606-add-github-provider-to-ui 2025-07-23 16:06:34 +08:00
Alejandro Bailo
3840e40870 test(e2e): Sign-in (#8337)
Co-authored-by: César Arroba <cesar@prowler.com>
2025-07-22 18:04:54 +02:00
dependabot[bot]
ab2d57554a chore(deps): bump form-data from 4.0.3 to 4.0.4 in /ui (#8346)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-07-22 17:53:32 +02:00
César Arroba
cbb5b21e6c chore(gha): e2e tests pipeline with API services (#8338) 2025-07-22 17:49:23 +02:00
Sergio Garcia
1efd5668ce feat(api): add GitHub provider support (#8271) 2025-07-22 23:26:02 +08:00
Sergio Garcia
ca86aeb1d7 feat(aws): new check bedrock_api_key_no_administrative_privileges (#8321) 2025-07-22 22:06:17 +08:00
Víctor Fernández Poyatos
4f2a8b71bb feat(performance): resources scenario (#8345) 2025-07-22 13:01:19 +02:00
Prowler Bot
3b0cb3db85 chore(regions_update): Changes in regions for AWS services (#8333)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2025-07-22 17:23:24 +08:00
Víctor Fernández Poyatos
00c527ff79 chore: update Prowler changelog for v5.9.2 (#8342) 2025-07-22 10:53:22 +02:00
MrCloudSec
f1277e868c chore: solve comments 2025-07-22 15:57:57 +08:00
Víctor Fernández Poyatos
ab348d5752 feat(resources): Optimize findings prefetching during resource views (#8336) 2025-07-21 16:33:07 +02:00
alejandrobailo
aef7876fed feat: code structure and behavior 2025-07-21 15:25:20 +02:00
Daniel Barranquero
dd713351dc fix(defender): avoid duplicated findings in check defender_domain_dkim_enabled (#8334) 2025-07-21 13:07:26 +02:00
sumit-tft
fa722f1dc7 feat(ui): add 32-character limit validation for scan name in create a… (#8319)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2025-07-21 10:00:25 +02:00
Pedro Martín
b0cc3978d0 feat(docs): add info about updating Prowler App (#8320) 2025-07-21 07:44:07 +02:00
César Arroba
aa843b823c chore(gha): fix action version (#8327) 2025-07-18 15:00:32 +02:00
Víctor Fernández Poyatos
020edc0d1d fix(tasks): calculate failed findings for resources during scan (#8322) 2025-07-18 13:19:22 +02:00
César Arroba
036da81bbd chore(gha): fix api prowler version (#8323) 2025-07-18 12:43:38 +02:00
sumit-tft
4428bcb2c0 feat(ui): update step title and description in cloud provider update … (#8303)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2025-07-18 10:11:44 +02:00
Prowler Bot
21de9a2f6f chore(release): Bump version to v5.10.0 (#8314)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2025-07-17 19:38:28 +02:00
Alejandro Bailo
231d933b9e chore(docs): SAML documentation (#8137)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-07-17 23:22:49 +05:45
Alejandro Bailo
2ad360a7f9 docs(ui): Mutelist documentation (#8201) 2025-07-17 23:15:20 +05:45
MrCloudSec
83f4d237c9 fix: tests 2025-07-17 16:44:53 +08:00
Sergio Garcia
3c947061c8 Merge branch 'master' into PRWLR-7606-add-github-provider-to-ui 2025-07-17 16:34:29 +08:00
MrCloudSec
900645f79b chore: add changelog 2025-07-17 16:33:45 +08:00
MrCloudSec
afd89cfb2c chore(ui): add Github provider 2025-07-17 16:17:27 +08:00
147 changed files with 4320 additions and 645 deletions

View File

@@ -19,12 +19,23 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: '3.12'
- name: Install Poetry
run: |
python3 -m pip install --user poetry
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: Parse version and determine branch
run: |
# Validate version format (reusing pattern from sdk-bump-version.yml)
@@ -107,11 +118,12 @@ jobs:
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
- name: Verify prowler dependency in api/pyproject.toml
if: ${{ env.PATCH_VERSION != '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_PROWLER_REF" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_PROWLER_REF')"
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
if [ "$CURRENT_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$BRANCH_NAME_TRIMMED', found: '$CURRENT_PROWLER_REF')"
exit 1
fi
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
@@ -136,6 +148,36 @@ jobs:
fi
git checkout -b "$BRANCH_NAME"
- name: Update prowler dependency in api/pyproject.toml
if: ${{ env.PATCH_VERSION == '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
# Minor release: update the dependency to use the new branch
echo "Minor release detected - updating prowler dependency from '$CURRENT_PROWLER_REF' to '$BRANCH_NAME_TRIMMED'"
sed -i "s|prowler @ git+https://github.com/prowler-cloud/prowler.git@[^\"]*\"|prowler @ git+https://github.com/prowler-cloud/prowler.git@$BRANCH_NAME_TRIMMED\"|" api/pyproject.toml
# Verify the change was made
UPDATED_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
if [ "$UPDATED_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
echo "ERROR: Failed to update prowler dependency in api/pyproject.toml"
exit 1
fi
# Update poetry lock file
echo "Updating poetry.lock file..."
cd api
poetry lock --no-update
cd ..
# Commit and push the changes
git add api/pyproject.toml api/poetry.lock
git commit -m "chore(api): update prowler dependency to $BRANCH_NAME_TRIMMED for release $PROWLER_VERSION"
git push origin "$BRANCH_NAME"
echo "✓ api/pyproject.toml prowler dependency updated to: $UPDATED_PROWLER_REF"
- name: Extract changelog entries
run: |
set -e

View File

@@ -52,11 +52,56 @@ jobs:
AUTH_SECRET: 'fallback-ci-secret-for-testing'
AUTH_TRUST_HOST: true
NEXTAUTH_URL: http://localhost:3000
PROWLER_API_PORT: 8080
NEXT_PUBLIC_API_BASE_URL: ${{ secrets.API_BASE_URL || 'http://localhost:8080/api/v1' }}
E2E_USER: ${{ secrets.E2E_USER }}
E2E_PASSWORD: ${{ secrets.E2E_PASSWORD }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Start needed services with docker compose
if: github.repository == 'prowler-cloud/prowler'
run: |
docker compose up -d api worker worker-beat
- name: Wait for prowler-api to respond
if: github.repository == 'prowler-cloud/prowler'
run: |
echo "Waiting for prowler-api..."
for i in {1..30}; do
if curl -s http://localhost:${PROWLER_API_PORT}/api/v1/docs >/dev/null 2>&1; then
echo "Prowler API is ready!"
break
fi
echo "Waiting for prowler-api... (attempt $i/30)"
sleep 3
done
- name: Run database migrations
if: github.repository == 'prowler-cloud/prowler'
run: |
echo "Running Django migrations..."
docker compose exec -T api sh -c '
poetry run python manage.py migrate --database admin
'
echo "Database migrations completed!"
- name: Copy local fixtures into API container
if: github.repository == 'prowler-cloud/prowler'
run: |
docker cp ./api/src/backend/api/fixtures/dev/. prowler-api-1:/home/prowler/backend/api/fixtures/dev
- name: Load database fixtures for e2e tests
if: github.repository == 'prowler-cloud/prowler'
run: |
docker compose exec -T api sh -c '
echo "Loading all fixtures from api/fixtures/dev/..."
for fixture in api/fixtures/dev/*.json; do
if [ -f "$fixture" ]; then
echo "Loading $fixture"
poetry run python manage.py loaddata "$fixture" --database admin
fi
done
echo "All database fixtures loaded successfully!"
'
- name: Setup Node.js
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
@@ -66,6 +111,9 @@ jobs:
- name: Install dependencies
working-directory: ./ui
run: npm ci
- name: Build the application
working-directory: ./ui
run: npm run build
- name: Cache Playwright browsers
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
id: playwright-cache
@@ -78,9 +126,6 @@ jobs:
working-directory: ./ui
if: steps.playwright-cache.outputs.cache-hit != 'true'
run: npm run test:e2e:install
- name: Build the application
working-directory: ./ui
run: npm run build
- name: Run Playwright tests
working-directory: ./ui
run: npm run test:e2e
@@ -92,6 +137,13 @@ jobs:
path: ui/playwright-report/
retention-days: 30
- name: Cleanup services
if: github.repository == 'prowler-cloud/prowler'
run: |
echo "Shutting down services..."
docker-compose down -v || true
echo "Cleanup completed"
test-container-build:
runs-on: ubuntu-latest
steps:

View File

@@ -2,6 +2,27 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.11.0] (Prowler UNRELEASED)
### Added
- GitHub provider support [(#8271)](https://github.com/prowler-cloud/prowler/pull/8271)
---
## [1.10.2] (Prowler v5.9.2)
### Changed
- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)
---
## [v1.10.1] (Prowler v5.9.1)
### Fixed
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
---
## [v1.10.0] (Prowler v5.9.0)
### Added
@@ -12,7 +33,7 @@ All notable changes to the **Prowler API** are documented in this file.
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow to mute findings.
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
### Fixed
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)

View File

@@ -38,7 +38,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.10.0"
version = "1.10.2"
[project.scripts]
celery = "src.backend.config.settings.celery"

View File

@@ -24,5 +24,18 @@
"is_active": true,
"date_joined": "2024-09-18T09:04:20.850Z"
}
},
{
"model": "api.user",
"pk": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
"fields": {
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
"last_login": null,
"name": "E2E Test User",
"email": "e2e@prowler.com",
"company_name": "Prowler E2E Tests",
"is_active": true,
"date_joined": "2024-01-01T00:00:00.850Z"
}
}
]

View File

@@ -46,5 +46,24 @@
"role": "member",
"date_joined": "2024-09-19T11:03:59.712Z"
}
},
{
"model": "api.tenant",
"pk": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"fields": {
"inserted_at": "2024-01-01T00:00:00Z",
"updated_at": "2024-01-01T00:00:00Z",
"name": "E2E Test Tenant"
}
},
{
"model": "api.membership",
"pk": "9b1a2c3d-4e5f-6789-abc1-23456789def0",
"fields": {
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"role": "owner",
"date_joined": "2024-01-01T00:00:00.000Z"
}
}
]

View File

@@ -149,5 +149,32 @@
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"inserted_at": "2024-11-20T15:36:14.302Z"
}
},
{
"model": "api.role",
"pk": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
"fields": {
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"name": "e2e_admin",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": true,
"manage_scans": true,
"unlimited_visibility": true,
"inserted_at": "2024-01-01T00:00:00.000Z",
"updated_at": "2024-01-01T00:00:00.000Z"
}
},
{
"model": "api.userrolerelationship",
"pk": "f1e2d3c4-b5a6-9876-5432-10fedcba9876",
"fields": {
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"role": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
"inserted_at": "2024-01-01T00:00:00.000Z"
}
}
]

View File

@@ -0,0 +1,30 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
    """Create a (tenant_id, resource_id) BTREE index on every existing
    partition of resource_finding_mappings.

    The parent-model index is registered separately in the follow-up
    migration; this one only touches the physical partitions.
    """

    # Per-partition index creation is executed statement-by-statement,
    # outside a single wrapping transaction.
    atomic = False

    dependencies = [
        ("api", "0039_resource_resources_failed_findings_idx"),
    ]

    operations = [
        migrations.RunPython(
            # Forward: create rfm_tenant_resource_idx on each partition.
            partial(
                create_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_resource_idx",
                columns="tenant_id, resource_id",
                method="BTREE",
            ),
            # Reverse: drop the per-partition index again.
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="resource_finding_mappings",
                index_name="rfm_tenant_resource_idx",
            ),
        ),
    ]

View File

@@ -0,0 +1,17 @@
from django.db import migrations, models
class Migration(migrations.Migration):
    """Register the (tenant_id, resource_id) index on the partitioned
    ResourceFindingMapping parent model, so Django's model state matches
    the per-partition indexes created in migration 0040.
    """

    dependencies = [
        ("api", "0040_rfm_tenant_resource_index_partitions"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="resourcefindingmapping",
            index=models.Index(
                fields=["tenant_id", "resource_id"],
                # Same name as the per-partition indexes from 0040.
                name="rfm_tenant_resource_idx",
            ),
        ),
    ]

View File

@@ -0,0 +1,23 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a partial covering index for latest-completed-scan lookups
    (tenant, provider, newest first), built CONCURRENTLY so writes to
    the scans table are not blocked while it is created.
    """

    # CREATE INDEX CONCURRENTLY cannot run inside a transaction block.
    atomic = False

    dependencies = [
        ("api", "0041_rfm_tenant_resource_parent_partitions"),
        # NOTE(review): the django_celery_beat dependency looks unrelated
        # to this index — presumably it only enforces migration ordering;
        # confirm.
        ("django_celery_beat", "0019_alter_periodictasks_options"),
    ]

    operations = [
        AddIndexConcurrently(
            model_name="scan",
            index=models.Index(
                # Partial index: only completed scans are indexed.
                condition=models.Q(("state", "completed")),
                fields=["tenant_id", "provider_id", "-inserted_at"],
                # INCLUDE(id) allows index-only scans for the scan id.
                include=("id",),
                name="scans_prov_ins_desc_idx",
            ),
        ),
    ]

View File

@@ -0,0 +1,33 @@
# Generated by Django 5.1.7 on 2025-07-09 14:44
from django.db import migrations
import api.db_utils
class Migration(migrations.Migration):
    """Add 'github' as a provider type: update the Django field choices
    and extend the underlying PostgreSQL enum type to match.
    """

    dependencies = [
        ("api", "0042_scan_scans_prov_ins_desc_idx"),
    ]

    operations = [
        migrations.AlterField(
            model_name="provider",
            name="provider",
            field=api.db_utils.ProviderEnumField(
                choices=[
                    ("aws", "AWS"),
                    ("azure", "Azure"),
                    ("gcp", "GCP"),
                    ("kubernetes", "Kubernetes"),
                    ("m365", "M365"),
                    ("github", "GitHub"),
                ],
                default="aws",
            ),
        ),
        migrations.RunSQL(
            # NOTE(review): ALTER TYPE ... ADD VALUE cannot run inside a
            # transaction block on PostgreSQL < 12; if older versions must
            # be supported this migration needs atomic = False — confirm
            # the target PostgreSQL version.
            "ALTER TYPE provider ADD VALUE IF NOT EXISTS 'github';",
            # PostgreSQL cannot remove an enum value, so the reverse is a
            # deliberate no-op.
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]

View File

@@ -205,6 +205,7 @@ class Provider(RowLevelSecurityProtectedModel):
GCP = "gcp", _("GCP")
KUBERNETES = "kubernetes", _("Kubernetes")
M365 = "m365", _("M365")
GITHUB = "github", _("GitHub")
@staticmethod
def validate_aws_uid(value):
@@ -265,6 +266,16 @@ class Provider(RowLevelSecurityProtectedModel):
pointer="/data/attributes/uid",
)
    @staticmethod
    def validate_github_uid(value):
        # Accepts a GitHub username or organization name: 1-39 characters,
        # starting with an alphanumeric, followed by alphanumerics/hyphens.
        # NOTE(review): GitHub additionally forbids trailing hyphens and
        # consecutive hyphens ("--"); this pattern allows both — confirm
        # whether stricter validation is wanted.
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9-]{0,38}$", value):
            raise ModelValidationError(
                detail="GitHub provider ID must be a valid GitHub username or organization name (1-39 characters, "
                "starting with alphanumeric, containing only alphanumeric characters and hyphens).",
                code="github-uid",
                pointer="/data/attributes/uid",
            )
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
@@ -476,6 +487,13 @@ class Scan(RowLevelSecurityProtectedModel):
condition=Q(state=StateChoices.COMPLETED),
name="scans_prov_state_ins_desc_idx",
),
# TODO This might replace `scans_prov_state_ins_desc_idx` completely. Review usage
models.Index(
fields=["tenant_id", "provider_id", "-inserted_at"],
condition=Q(state=StateChoices.COMPLETED),
include=["id"],
name="scans_prov_ins_desc_idx",
),
]
class JSONAPIMeta:
@@ -860,6 +878,10 @@ class ResourceFindingMapping(PostgresPartitionedModel, RowLevelSecurityProtected
fields=["tenant_id", "finding_id"],
name="rfm_tenant_finding_idx",
),
models.Index(
fields=["tenant_id", "resource_id"],
name="rfm_tenant_resource_idx",
),
]
constraints = [
models.UniqueConstraint(

View File

@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.10.0
version: 1.11.0
description: |-
Prowler API specification.
@@ -544,6 +544,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -552,6 +553,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -562,6 +564,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -572,6 +575,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -1061,6 +1065,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -1069,6 +1074,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -1079,6 +1085,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -1089,6 +1096,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -1486,6 +1494,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -1494,6 +1503,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -1504,6 +1514,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -1514,6 +1525,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -1909,6 +1921,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -1917,6 +1930,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -1927,6 +1941,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -1937,6 +1952,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -2320,6 +2336,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -2328,6 +2345,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -2338,6 +2356,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -2348,6 +2367,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -3121,6 +3141,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -3129,6 +3150,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -3139,6 +3161,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -3149,6 +3172,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -3282,6 +3306,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -3290,6 +3315,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -3300,6 +3326,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -3310,6 +3337,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -3459,6 +3487,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -3467,6 +3496,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -3477,6 +3507,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -3487,6 +3518,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -4165,6 +4197,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -4173,6 +4206,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider__in]
schema:
@@ -4746,6 +4780,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -4754,6 +4789,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -4764,6 +4800,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -4774,6 +4811,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -6457,6 +6495,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -6465,6 +6504,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
- in: query
name: filter[provider_type__in]
schema:
@@ -6475,6 +6515,7 @@ paths:
- aws
- azure
- gcp
- github
- kubernetes
- m365
description: |-
@@ -6485,6 +6526,7 @@ paths:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
explode: false
style: form
- in: query
@@ -11130,6 +11172,34 @@ components:
encoded as a string.
required:
- kubeconfig_content
- type: object
title: GitHub Personal Access Token
properties:
personal_access_token:
type: string
description: GitHub personal access token for authentication.
required:
- personal_access_token
- type: object
title: GitHub OAuth App Token
properties:
oauth_app_token:
type: string
description: GitHub OAuth App token for authentication.
required:
- oauth_app_token
- type: object
title: GitHub App Credentials
properties:
github_app_id:
type: integer
description: GitHub App ID for authentication.
github_app_key:
type: string
description: Path to the GitHub App private key file.
required:
- github_app_id
- github_app_key
writeOnly: true
required:
- secret
@@ -12035,6 +12105,7 @@ components:
- gcp
- kubernetes
- m365
- github
type: string
description: |-
* `aws` - AWS
@@ -12042,6 +12113,7 @@ components:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
uid:
type: string
title: Unique identifier for the provider, set by the provider
@@ -12149,6 +12221,7 @@ components:
- gcp
- kubernetes
- m365
- github
type: string
description: |-
* `aws` - AWS
@@ -12156,6 +12229,7 @@ components:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
uid:
type: string
title: Unique identifier for the provider, set by the provider
@@ -12194,6 +12268,7 @@ components:
- gcp
- kubernetes
- m365
- github
type: string
description: |-
* `aws` - AWS
@@ -12201,6 +12276,7 @@ components:
* `gcp` - GCP
* `kubernetes` - Kubernetes
* `m365` - M365
* `github` - GitHub
uid:
type: string
minLength: 3
@@ -12852,6 +12928,34 @@ components:
as a string.
required:
- kubeconfig_content
- type: object
title: GitHub Personal Access Token
properties:
personal_access_token:
type: string
description: GitHub personal access token for authentication.
required:
- personal_access_token
- type: object
title: GitHub OAuth App Token
properties:
oauth_app_token:
type: string
description: GitHub OAuth App token for authentication.
required:
- oauth_app_token
- type: object
title: GitHub App Credentials
properties:
github_app_id:
type: integer
description: GitHub App ID for authentication.
github_app_key:
type: string
description: Path to the GitHub App private key file.
required:
- github_app_id
- github_app_key
writeOnly: true
required:
- secret_type
@@ -13071,6 +13175,34 @@ components:
encoded as a string.
required:
- kubeconfig_content
- type: object
title: GitHub Personal Access Token
properties:
personal_access_token:
type: string
description: GitHub personal access token for authentication.
required:
- personal_access_token
- type: object
title: GitHub OAuth App Token
properties:
oauth_app_token:
type: string
description: GitHub OAuth App token for authentication.
required:
- oauth_app_token
- type: object
title: GitHub App Credentials
properties:
github_app_id:
type: integer
description: GitHub App ID for authentication.
github_app_key:
type: string
description: Path to the GitHub App private key file.
required:
- github_app_id
- github_app_key
writeOnly: true
required:
- secret_type
@@ -13305,6 +13437,34 @@ components:
as a string.
required:
- kubeconfig_content
- type: object
title: GitHub Personal Access Token
properties:
personal_access_token:
type: string
description: GitHub personal access token for authentication.
required:
- personal_access_token
- type: object
title: GitHub OAuth App Token
properties:
oauth_app_token:
type: string
description: GitHub OAuth App token for authentication.
required:
- oauth_app_token
- type: object
title: GitHub App Credentials
properties:
github_app_id:
type: integer
description: GitHub App ID for authentication.
github_app_key:
type: string
description: Path to the GitHub App private key file.
required:
- github_app_id
- github_app_key
writeOnly: true
required:
- secret

View File

@@ -966,6 +966,31 @@ class TestProviderViewSet:
"uid": "subdomain1.subdomain2.subdomain3.subdomain4.domain.net",
"alias": "test",
},
{
"provider": "github",
"uid": "test-user",
"alias": "test",
},
{
"provider": "github",
"uid": "test-organization",
"alias": "GitHub Org",
},
{
"provider": "github",
"uid": "prowler-cloud",
"alias": "Prowler",
},
{
"provider": "github",
"uid": "microsoft",
"alias": "Microsoft",
},
{
"provider": "github",
"uid": "a12345678901234567890123456789012345678",
"alias": "Long Username",
},
]
),
)
@@ -1079,6 +1104,42 @@ class TestProviderViewSet:
"m365-uid",
"uid",
),
(
{
"provider": "github",
"uid": "-invalid-start",
"alias": "test",
},
"github-uid",
"uid",
),
(
{
"provider": "github",
"uid": "invalid@username",
"alias": "test",
},
"github-uid",
"uid",
),
(
{
"provider": "github",
"uid": "invalid_username",
"alias": "test",
},
"github-uid",
"uid",
),
(
{
"provider": "github",
"uid": "a" * 40,
"alias": "test",
},
"github-uid",
"uid",
),
]
),
)
@@ -5188,6 +5249,8 @@ class TestComplianceOverviewViewSet:
assert "description" in attributes
assert "status" in attributes
# TODO: This test may fail randomly because requirements are not ordered
@pytest.mark.xfail
def test_compliance_overview_requirements_manual(
self, authenticated_client, compliance_requirements_overviews_fixture
):

View File

@@ -13,6 +13,7 @@ from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.common.models import Connection
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.github.github_provider import GithubProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from prowler.providers.m365.m365_provider import M365Provider
@@ -55,14 +56,21 @@ def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
def return_prowler_provider(
provider: Provider,
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider]:
) -> [
AwsProvider
| AzureProvider
| GcpProvider
| GithubProvider
| KubernetesProvider
| M365Provider
]:
"""Return the Prowler provider class based on the given provider type.
Args:
provider (Provider): The provider object containing the provider type and associated secrets.
Returns:
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: The corresponding provider class.
AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: The corresponding provider class.
Raises:
ValueError: If the provider type specified in `provider.provider` is not supported.
@@ -78,6 +86,8 @@ def return_prowler_provider(
prowler_provider = KubernetesProvider
case Provider.ProviderChoices.M365.value:
prowler_provider = M365Provider
case Provider.ProviderChoices.GITHUB.value:
prowler_provider = GithubProvider
case _:
raise ValueError(f"Provider type {provider.provider} not supported")
return prowler_provider
@@ -120,7 +130,14 @@ def get_prowler_provider_kwargs(
def initialize_prowler_provider(
provider: Provider,
mutelist_processor: Processor | None = None,
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider:
) -> (
AwsProvider
| AzureProvider
| GcpProvider
| GithubProvider
| KubernetesProvider
| M365Provider
):
"""Initialize a Prowler provider instance based on the given provider type.
Args:
@@ -128,8 +145,8 @@ def initialize_prowler_provider(
mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.
Returns:
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
(`AwsProvider`, `AzureProvider`, `GcpProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
(`AwsProvider`, `AzureProvider`, `GcpProvider`, `GithubProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
provider's secrets.
"""
prowler_provider = return_prowler_provider(provider)

View File

@@ -176,6 +176,43 @@ from rest_framework_json_api import serializers
},
"required": ["kubeconfig_content"],
},
{
"type": "object",
"title": "GitHub Personal Access Token",
"properties": {
"personal_access_token": {
"type": "string",
"description": "GitHub personal access token for authentication.",
}
},
"required": ["personal_access_token"],
},
{
"type": "object",
"title": "GitHub OAuth App Token",
"properties": {
"oauth_app_token": {
"type": "string",
"description": "GitHub OAuth App token for authentication.",
}
},
"required": ["oauth_app_token"],
},
{
"type": "object",
"title": "GitHub App Credentials",
"properties": {
"github_app_id": {
"type": "integer",
"description": "GitHub App ID for authentication.",
},
"github_app_key": {
"type": "string",
"description": "Path to the GitHub App private key file.",
},
},
"required": ["github_app_id", "github_app_key"],
},
]
}
)

View File

@@ -1217,6 +1217,8 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
serializer = AzureProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.GCP.value:
serializer = GCPProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.GITHUB.value:
serializer = GithubProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.KUBERNETES.value:
serializer = KubernetesProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.M365.value:
@@ -1296,6 +1298,16 @@ class KubernetesProviderSecret(serializers.Serializer):
resource_name = "provider-secrets"
class GithubProviderSecret(serializers.Serializer):
    """Validates GitHub provider credentials.

    Three authentication methods are supported: a personal access token,
    an OAuth App token, or GitHub App credentials (numeric app id plus the
    private key contents). Every field is optional on this serializer;
    NOTE(review): no cross-field validation here enforces that at least one
    method is fully supplied — confirm that is checked upstream.
    """

    # Personal access token authentication.
    personal_access_token = serializers.CharField(required=False)
    # OAuth App token authentication.
    oauth_app_token = serializers.CharField(required=False)
    # GitHub App authentication: app id and the private key file contents.
    github_app_id = serializers.IntegerField(required=False)
    github_app_key_content = serializers.CharField(required=False)

    class Meta:
        resource_name = "provider-secrets"
class AWSRoleAssumptionProviderSecret(serializers.Serializer):
role_arn = serializers.CharField()
external_id = serializers.CharField()

View File

@@ -22,7 +22,7 @@ from django.conf import settings as django_settings
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.search import SearchQuery
from django.db import transaction
from django.db.models import Count, F, Prefetch, Q, Sum
from django.db.models import Count, F, Prefetch, Q, Subquery, Sum
from django.db.models.functions import Coalesce
from django.http import HttpResponse
from django.shortcuts import redirect
@@ -292,7 +292,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.10.0"
spectacular_settings.VERSION = "1.10.2"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -1994,6 +1994,21 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
)
)
def _should_prefetch_findings(self) -> bool:
fields_param = self.request.query_params.get("fields[resources]", "")
include_param = self.request.query_params.get("include", "")
return (
fields_param == ""
or "findings" in fields_param.split(",")
or "findings" in include_param.split(",")
)
def _get_findings_prefetch(self):
    """Build the Prefetch list used to load related findings.

    Defers the heavy ``scan`` and ``resources`` columns and scopes the
    queryset to the current tenant so tenant isolation is preserved.
    """
    deferred = Finding.all_objects.defer("scan", "resources")
    tenant_scoped = deferred.filter(tenant_id=self.request.tenant_id)
    return [Prefetch("findings", queryset=tenant_scoped)]
def get_serializer_class(self):
if self.action in ["metadata", "metadata_latest"]:
return ResourceMetadataSerializer
@@ -2017,7 +2032,11 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
filtered_queryset,
manager=Resource.all_objects,
select_related=["provider"],
prefetch_related=["findings"],
prefetch_related=(
self._get_findings_prefetch()
if self._should_prefetch_findings()
else []
),
)
def retrieve(self, request, *args, **kwargs):
@@ -2042,14 +2061,18 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
tenant_id = request.tenant_id
filtered_queryset = self.filter_queryset(self.get_queryset())
latest_scan_ids = (
Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
latest_scans = (
Scan.all_objects.filter(
tenant_id=tenant_id,
state=StateChoices.COMPLETED,
)
.order_by("provider_id", "-inserted_at")
.distinct("provider_id")
.values_list("id", flat=True)
.values("provider_id")
)
filtered_queryset = filtered_queryset.filter(
tenant_id=tenant_id, provider__scan__in=latest_scan_ids
provider_id__in=Subquery(latest_scans)
)
return self.paginate_by_pk(
@@ -2057,7 +2080,11 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
filtered_queryset,
manager=Resource.all_objects,
select_related=["provider"],
prefetch_related=["findings"],
prefetch_related=(
self._get_findings_prefetch()
if self._should_prefetch_findings()
else []
),
)
@action(detail=False, methods=["get"], url_name="metadata")

View File

@@ -20,6 +20,7 @@ from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected im
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_github import GithubCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
@@ -93,6 +94,9 @@ COMPLIANCE_CLASS_MAP = {
(lambda name: name == "prowler_threatscore_m365", ProwlerThreatScoreM365),
(lambda name: name.startswith("iso27001_"), M365ISO27001),
],
"github": [
(lambda name: name.startswith("cis_"), GithubCIS),
],
}

View File

@@ -1,11 +1,12 @@
import json
import time
from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from celery.utils.log import get_task_logger
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
from django.db import IntegrityError, OperationalError, connection
from django.db import IntegrityError, OperationalError
from django.db.models import Case, Count, IntegerField, Prefetch, Sum, When
from tasks.utils import CustomEncoder
@@ -13,7 +14,11 @@ from api.compliance import (
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
generate_scan_compliance,
)
from api.db_utils import create_objects_in_batches, rls_transaction
from api.db_utils import (
create_objects_in_batches,
rls_transaction,
update_objects_in_batches,
)
from api.exceptions import ProviderConnectionError
from api.models import (
ComplianceRequirementOverview,
@@ -103,7 +108,10 @@ def _store_resources(
def perform_prowler_scan(
tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
tenant_id: str,
scan_id: str,
provider_id: str,
checks_to_execute: list[str] | None = None,
):
"""
Perform a scan using Prowler and store the findings and resources in the database.
@@ -175,6 +183,7 @@ def perform_prowler_scan(
resource_cache = {}
tag_cache = {}
last_status_cache = {}
resource_failed_findings_cache = defaultdict(int)
for progress, findings in prowler_scan.scan():
for finding in findings:
@@ -200,6 +209,9 @@ def perform_prowler_scan(
},
)
resource_cache[resource_uid] = resource_instance
# Initialize all processed resources in the cache
resource_failed_findings_cache[resource_uid] = 0
else:
resource_instance = resource_cache[resource_uid]
@@ -313,6 +325,11 @@ def perform_prowler_scan(
)
finding_instance.add_resources([resource_instance])
# Increment failed_findings_count cache if the finding status is FAIL and not muted
if status == FindingStatus.FAIL and not finding.muted:
resource_uid = finding.resource_uid
resource_failed_findings_cache[resource_uid] += 1
# Update scan resource summaries
scan_resource_cache.add(
(
@@ -330,6 +347,24 @@ def perform_prowler_scan(
scan_instance.state = StateChoices.COMPLETED
# Update failed_findings_count for all resources in batches if scan completed successfully
if resource_failed_findings_cache:
resources_to_update = []
for resource_uid, failed_count in resource_failed_findings_cache.items():
if resource_uid in resource_cache:
resource_instance = resource_cache[resource_uid]
resource_instance.failed_findings_count = failed_count
resources_to_update.append(resource_instance)
if resources_to_update:
update_objects_in_batches(
tenant_id=tenant_id,
model=Resource,
objects=resources_to_update,
fields=["failed_findings_count"],
batch_size=1000,
)
except Exception as e:
logger.error(f"Error performing scan {scan_id}: {e}")
exception = e
@@ -376,7 +411,6 @@ def perform_prowler_scan(
def aggregate_findings(tenant_id: str, scan_id: str):
"""
Aggregates findings for a given scan and stores the results in the ScanSummary table.
Also updates the failed_findings_count for each resource based on the latest findings.
This function retrieves all findings associated with a given `scan_id` and calculates various
metrics such as counts of failed, passed, and muted findings, as well as their deltas (new,
@@ -405,8 +439,6 @@ def aggregate_findings(tenant_id: str, scan_id: str):
- muted_new: Muted findings with a delta of 'new'.
- muted_changed: Muted findings with a delta of 'changed'.
"""
_update_resource_failed_findings_count(tenant_id, scan_id)
with rls_transaction(tenant_id):
findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)
@@ -531,48 +563,6 @@ def aggregate_findings(tenant_id: str, scan_id: str):
ScanSummary.objects.bulk_create(scan_aggregations, batch_size=3000)
def _update_resource_failed_findings_count(tenant_id: str, scan_id: str):
"""
Update the failed_findings_count field for resources based on the latest findings.
This function calculates the number of failed findings for each resource by:
1. Getting the latest finding for each finding.uid
2. Counting failed findings per resource
3. Updating the failed_findings_count field for each resource
Args:
tenant_id (str): The ID of the tenant to which the scan belongs.
scan_id (str): The ID of the scan for which to update resource counts.
"""
with rls_transaction(tenant_id):
scan = Scan.objects.get(pk=scan_id)
provider_id = str(scan.provider_id)
with connection.cursor() as cursor:
cursor.execute(
"""
UPDATE resources AS r
SET failed_findings_count = COALESCE((
SELECT COUNT(*) FROM (
SELECT DISTINCT ON (f.uid) f.uid
FROM findings AS f
JOIN resource_finding_mappings AS rfm
ON rfm.finding_id = f.id
WHERE f.tenant_id = %s
AND f.status = %s
AND f.muted = FALSE
AND rfm.resource_id = r.id
ORDER BY f.uid, f.inserted_at DESC
) AS latest_uids
), 0)
WHERE r.tenant_id = %s
AND r.provider_id = %s
""",
[tenant_id, FindingStatus.FAIL, tenant_id, provider_id],
)
def create_compliance_requirements(tenant_id: str, scan_id: str):
"""
Create detailed compliance requirement overview records for a scan.

View File

@@ -7,22 +7,14 @@ import pytest
from tasks.jobs.scan import (
_create_finding_delta,
_store_resources,
_update_resource_failed_findings_count,
create_compliance_requirements,
perform_prowler_scan,
)
from tasks.utils import CustomEncoder
from api.exceptions import ProviderConnectionError
from api.models import (
Finding,
Provider,
Resource,
Scan,
Severity,
StateChoices,
StatusChoices,
)
from api.models import Finding, Provider, Resource, Scan, StateChoices, StatusChoices
from prowler.lib.check.models import Severity
@pytest.mark.django_db
@@ -182,6 +174,9 @@ class TestPerformScan:
assert tag_keys == set(finding.resource_tags.keys())
assert tag_values == set(finding.resource_tags.values())
# Assert that failed_findings_count is 0 (finding is PASS and muted)
assert scan_resource.failed_findings_count == 0
@patch("tasks.jobs.scan.ProwlerScan")
@patch(
"tasks.jobs.scan.initialize_prowler_provider",
@@ -386,6 +381,359 @@ class TestPerformScan:
assert resource == resource_instance
assert resource_uid_tuple == (resource_instance.uid, resource_instance.region)
def test_perform_prowler_scan_with_failed_findings(
    self,
    tenants_fixture,
    scans_fixture,
    providers_fixture,
):
    """Test that failed findings increment the failed_findings_count.

    A single unmuted FAIL finding on a fresh resource must leave that
    resource with failed_findings_count == 1 once the scan completes.
    """
    with (
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        # Ensure the database is empty
        assert Finding.objects.count() == 0
        assert Resource.objects.count() == 0

        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]

        # Ensure the provider type is 'aws'
        provider.provider = Provider.ProviderChoices.AWS
        provider.save()

        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)

        # Mock a FAIL finding that is not muted
        fail_finding = MagicMock()
        fail_finding.uid = "fail_finding_uid"
        fail_finding.status = StatusChoices.FAIL
        fail_finding.status_extended = "test fail status"
        fail_finding.severity = Severity.high
        fail_finding.check_id = "fail_check"
        fail_finding.get_metadata.return_value = {"key": "value"}
        fail_finding.resource_uid = "resource_uid_fail"
        fail_finding.resource_name = "fail_resource"
        fail_finding.region = "us-east-1"
        fail_finding.service_name = "ec2"
        fail_finding.resource_type = "instance"
        fail_finding.resource_tags = {"env": "test"}
        fail_finding.muted = False
        fail_finding.raw = {}
        fail_finding.resource_metadata = {"test": "metadata"}
        fail_finding.resource_details = {"details": "test"}
        fail_finding.partition = "aws"
        fail_finding.compliance = {"compliance1": "FAIL"}

        # Mock the ProwlerScan instance: one batch at 100% progress
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [(100, [fail_finding])]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance

        # Mock prowler_provider
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )

        # Call the function under test
        perform_prowler_scan(tenant_id, scan_id, provider_id, [])

        # Refresh instances from the database
        scan.refresh_from_db()
        scan_resource = Resource.objects.get(provider=provider)

        # Assert that failed_findings_count is 1 (one FAIL finding not muted)
        assert scan_resource.failed_findings_count == 1
def test_perform_prowler_scan_multiple_findings_same_resource(
    self,
    tenants_fixture,
    scans_fixture,
    providers_fixture,
):
    """Test that multiple FAIL findings on the same resource increment the counter correctly.

    Two unmuted FAIL findings and one PASS finding are emitted for a single
    resource; the resource must end the scan with failed_findings_count == 2.
    """
    resource_uid = "shared_resource_uid"

    def build_finding(uid, status, severity, check_id, extended, metadata):
        # Local factory: all findings target the same shared resource and
        # differ only in the parameters below.
        finding = MagicMock()
        finding.uid = uid
        finding.status = status
        finding.status_extended = extended
        finding.severity = severity
        finding.check_id = check_id
        finding.get_metadata.return_value = metadata
        finding.resource_uid = resource_uid
        finding.resource_name = "shared_resource"
        finding.region = "us-east-1"
        finding.service_name = "ec2"
        finding.resource_type = "instance"
        finding.resource_tags = {}
        finding.muted = False
        finding.raw = {}
        finding.resource_metadata = {}
        finding.resource_details = {}
        finding.partition = "aws"
        finding.compliance = {}
        return finding

    with (
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]

        provider.provider = Provider.ProviderChoices.AWS
        provider.save()

        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)

        # Two FAIL findings (not muted) and one PASS finding on one resource.
        findings = [
            build_finding(
                "fail_finding_1",
                StatusChoices.FAIL,
                Severity.high,
                "fail_check_1",
                "fail 1",
                {"key": "value1"},
            ),
            build_finding(
                "fail_finding_2",
                StatusChoices.FAIL,
                Severity.medium,
                "fail_check_2",
                "fail 2",
                {"key": "value2"},
            ),
            build_finding(
                "pass_finding",
                StatusChoices.PASS,
                Severity.low,
                "pass_check",
                "pass",
                {"key": "value3"},
            ),
        ]

        # ProwlerScan yields a single batch containing all three findings.
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [(100, findings)]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance

        # Provider stub only needs to report its regions.
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )

        perform_prowler_scan(tenant_id, scan_id, provider_id, [])

        scan_resource = Resource.objects.get(provider=provider, uid=resource_uid)

        # Assert that failed_findings_count is 2 (two FAIL findings, one PASS)
        assert scan_resource.failed_findings_count == 2
def test_perform_prowler_scan_with_muted_findings(
    self,
    tenants_fixture,
    scans_fixture,
    providers_fixture,
):
    """Test that muted FAIL findings do not increment the failed_findings_count.

    A FAIL finding with muted=True must leave the resource with
    failed_findings_count == 0.
    """
    with (
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]

        provider.provider = Provider.ProviderChoices.AWS
        provider.save()

        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)

        # Mock a FAIL finding that is muted
        muted_fail_finding = MagicMock()
        muted_fail_finding.uid = "muted_fail_finding"
        muted_fail_finding.status = StatusChoices.FAIL
        muted_fail_finding.status_extended = "muted fail"
        muted_fail_finding.severity = Severity.high
        muted_fail_finding.check_id = "muted_fail_check"
        muted_fail_finding.get_metadata.return_value = {"key": "value"}
        muted_fail_finding.resource_uid = "muted_resource_uid"
        muted_fail_finding.resource_name = "muted_resource"
        muted_fail_finding.region = "us-east-1"
        muted_fail_finding.service_name = "ec2"
        muted_fail_finding.resource_type = "instance"
        muted_fail_finding.resource_tags = {}
        muted_fail_finding.muted = True
        muted_fail_finding.raw = {}
        muted_fail_finding.resource_metadata = {}
        muted_fail_finding.resource_details = {}
        muted_fail_finding.partition = "aws"
        muted_fail_finding.compliance = {}

        # Mock the ProwlerScan instance: one batch at 100% progress
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [(100, [muted_fail_finding])]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance

        # Mock prowler_provider
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )

        # Call the function under test
        perform_prowler_scan(tenant_id, scan_id, provider_id, [])

        # Refresh instances from the database
        scan_resource = Resource.objects.get(provider=provider)

        # Assert that failed_findings_count is 0 (FAIL finding is muted)
        assert scan_resource.failed_findings_count == 0
def test_perform_prowler_scan_reset_failed_findings_count(
    self,
    tenants_fixture,
    providers_fixture,
    resources_fixture,
):
    """Test that failed_findings_count is reset to 0 at the beginning of each scan.

    A resource seeded with failed_findings_count=5 that only produces a
    PASS finding in the next scan must end that scan with a count of 0.
    """
    # Use existing resource from fixture and set initial failed_findings_count
    tenant = tenants_fixture[0]
    provider = providers_fixture[0]
    resource = resources_fixture[0]

    # Set a non-zero failed_findings_count initially
    resource.failed_findings_count = 5
    resource.save()

    # Create a new scan
    scan = Scan.objects.create(
        name="Reset Test Scan",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.AVAILABLE,
        tenant_id=tenant.id,
    )

    with (
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        provider.provider = Provider.ProviderChoices.AWS
        provider.save()

        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)

        # Mock a PASS finding for the existing resource
        pass_finding = MagicMock()
        pass_finding.uid = "reset_test_finding"
        pass_finding.status = StatusChoices.PASS
        pass_finding.status_extended = "reset test pass"
        pass_finding.severity = Severity.low
        pass_finding.check_id = "reset_test_check"
        pass_finding.get_metadata.return_value = {"key": "value"}
        pass_finding.resource_uid = resource.uid
        pass_finding.resource_name = resource.name
        pass_finding.region = resource.region
        pass_finding.service_name = resource.service
        pass_finding.resource_type = resource.type
        pass_finding.resource_tags = {}
        pass_finding.muted = False
        pass_finding.raw = {}
        pass_finding.resource_metadata = {}
        pass_finding.resource_details = {}
        pass_finding.partition = "aws"
        pass_finding.compliance = {}

        # Mock the ProwlerScan instance: one batch at 100% progress
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [(100, [pass_finding])]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance

        # Mock prowler_provider
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = [resource.region]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )

        # Call the function under test
        perform_prowler_scan(tenant_id, scan_id, provider_id, [])

        # Refresh resource from the database
        resource.refresh_from_db()

        # Assert that failed_findings_count was reset to 0 during the scan
        assert resource.failed_findings_count == 0
# TODO Add tests for aggregations
@@ -697,68 +1045,3 @@ class TestCreateComplianceRequirements:
assert "requirements_created" in result
assert result["requirements_created"] >= 0
@pytest.mark.django_db
class TestUpdateResourceFailedFindingsCount:
    """Tests for the _update_resource_failed_findings_count helper.

    NOTE(review): the helper de-duplicates findings by uid (latest per uid),
    so duplicate-uid findings collapse to a single counted failure.
    """

    def test_execute_sql_update(
        self, tenants_fixture, scans_fixture, providers_fixture, resources_fixture
    ):
        # Three FAIL findings, two of which share a uid — expect a count of 2.
        resource = resources_fixture[0]
        tenant_id = resource.tenant_id
        scan_id = resource.provider.scans.first().id

        # Common kwargs for all failing findings
        base_kwargs = {
            "tenant_id": tenant_id,
            "scan_id": scan_id,
            "delta": None,
            "status": StatusChoices.FAIL,
            "status_extended": "test status extended",
            "impact": Severity.critical,
            "impact_extended": "test impact extended",
            "severity": Severity.critical,
            "raw_result": {
                "status": StatusChoices.FAIL,
                "impact": Severity.critical,
                "severity": Severity.critical,
            },
            "tags": {"test": "dev-qa"},
            "check_id": "test_check_id",
            "check_metadata": {
                "CheckId": "test_check_id",
                "Description": "test description apple sauce",
                "servicename": "ec2",
            },
            "first_seen_at": "2024-01-02T00:00:00Z",
        }

        # UIDs to create (two with same UID, one unique)
        uids = ["test_finding_uid_1", "test_finding_uid_1", "test_finding_uid_2"]

        # Create findings and associate with the resource
        for uid in uids:
            finding = Finding.objects.create(uid=uid, **base_kwargs)
            finding.add_resources([resource])

        # Counter is untouched until the helper runs.
        resource.refresh_from_db()
        assert resource.failed_findings_count == 0

        _update_resource_failed_findings_count(tenant_id=tenant_id, scan_id=scan_id)

        resource.refresh_from_db()
        # Only two since two findings share the same UID
        assert resource.failed_findings_count == 2

    @patch("tasks.jobs.scan.Scan.objects.get")
    def test_scan_not_found(
        self,
        mock_scan_get,
    ):
        # A missing scan must propagate Scan.DoesNotExist, not be swallowed.
        mock_scan_get.side_effect = Scan.DoesNotExist
        with pytest.raises(Scan.DoesNotExist):
            _update_resource_failed_findings_count(
                "8614ca97-8370-4183-a7f7-e96a6c7d2c93",
                "4705bed5-8782-4e8b-bab6-55e8043edaa6",
            )

View File

@@ -0,0 +1,234 @@
from locust import events, task
from utils.config import (
L_PROVIDER_NAME,
M_PROVIDER_NAME,
RESOURCES_UI_FIELDS,
S_PROVIDER_NAME,
TARGET_INSERTED_AT,
)
from utils.helpers import (
APIUserBase,
get_api_token,
get_auth_headers,
get_dynamic_filters_pairs,
get_next_resource_filter,
get_scan_id_from_provider_name,
)
# Shared state populated once at test start and read by every simulated user.
GLOBAL = {
    "token": None,
    "scan_ids": {},
    "resource_filters": None,
    "large_resource_filters": None,
}
@events.test_start.add_listener
def on_test_start(environment, **kwargs):
    """Run once before users spawn: authenticate and cache shared lookups."""
    GLOBAL["token"] = get_api_token(environment.host)
    # Resolve one representative scan id per provider size tier.
    GLOBAL["scan_ids"]["small"] = get_scan_id_from_provider_name(
        environment.host, GLOBAL["token"], S_PROVIDER_NAME
    )
    GLOBAL["scan_ids"]["medium"] = get_scan_id_from_provider_name(
        environment.host, GLOBAL["token"], M_PROVIDER_NAME
    )
    GLOBAL["scan_ids"]["large"] = get_scan_id_from_provider_name(
        environment.host, GLOBAL["token"], L_PROVIDER_NAME
    )
    # Filter name/value pairs used to randomize listing queries.
    GLOBAL["resource_filters"] = get_dynamic_filters_pairs(
        environment.host, GLOBAL["token"], "resources"
    )
    # Same, but scoped to the large scan's metadata.
    GLOBAL["large_resource_filters"] = get_dynamic_filters_pairs(
        environment.host, GLOBAL["token"], "resources", GLOBAL["scan_ids"]["large"]
    )
class APIUser(APIUserBase):
    """Simulated API user exercising the /resources endpoints.

    Task weights (the @task(n) arguments) bias traffic toward the
    field-restricted and filtered listings.
    """

    def on_start(self):
        # Copy state gathered in on_test_start so each user avoids
        # re-authenticating or re-resolving scan ids.
        self.token = GLOBAL["token"]
        self.s_scan_id = GLOBAL["scan_ids"]["small"]
        self.m_scan_id = GLOBAL["scan_ids"]["medium"]
        self.l_scan_id = GLOBAL["scan_ids"]["large"]
        self.available_resource_filters = GLOBAL["resource_filters"]
        self.available_resource_filters_large_scan = GLOBAL["large_resource_filters"]

    @task
    def resources_default(self):
        """Plain paginated listing filtered by updated_at."""
        name = "/resources"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_default_ui_fields(self):
        """Listing with the UI's sparse fieldset applied."""
        name = "/resources?fields"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}"
            f"&fields[resources]={','.join(RESOURCES_UI_FIELDS)}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_default_include(self):
        """Listing with the provider relationship included."""
        name = "/resources?include"
        page = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_metadata(self):
        """Metadata endpoint filtered by updated_at."""
        name = "/resources/metadata"
        endpoint = f"/resources/metadata?filter[updated_at]={TARGET_INSERTED_AT}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_scan_small(self):
        """Listing scoped to the small (50k) scan."""
        name = "/resources?filter[scan_id] - 50k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}" f"&filter[scan]={self.s_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_metadata_scan_small(self):
        """Metadata scoped to the small (50k) scan."""
        name = "/resources/metadata?filter[scan_id] - 50k"
        # NOTE(review): the "?&" is redundant but harmless; servers ignore
        # the empty parameter.
        endpoint = f"/resources/metadata?&filter[scan]={self.s_scan_id}"
        self.client.get(
            endpoint,
            headers=get_auth_headers(self.token),
            name=name,
        )

    @task(2)
    def resources_scan_medium(self):
        """Listing scoped to the medium (250k) scan."""
        name = "/resources?filter[scan_id] - 250k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}" f"&filter[scan]={self.m_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_metadata_scan_medium(self):
        """Metadata scoped to the medium (250k) scan."""
        name = "/resources/metadata?filter[scan_id] - 250k"
        endpoint = f"/resources/metadata?&filter[scan]={self.m_scan_id}"
        self.client.get(
            endpoint,
            headers=get_auth_headers(self.token),
            name=name,
        )

    @task
    def resources_scan_large(self):
        """Listing scoped to the large (500k) scan."""
        name = "/resources?filter[scan_id] - 500k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}" f"&filter[scan]={self.l_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_scan_large_include(self):
        """Large-scan listing with provider included."""
        name = "/resources?filter[scan_id]&include - 500k"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources?page[number]={page_number}"
            f"&filter[scan]={self.l_scan_id}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task
    def resources_metadata_scan_large(self):
        """Metadata scoped to the large (500k) scan."""
        endpoint = f"/resources/metadata?&filter[scan]={self.l_scan_id}"
        self.client.get(
            endpoint,
            headers=get_auth_headers(self.token),
            name="/resources/metadata?filter[scan_id] - 500k",
        )

    @task(2)
    def resources_filters(self):
        """Listing with a rotating metadata filter plus provider include."""
        name = "/resources?filter[resource_filter]&include"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )
        endpoint = (
            f"/resources?filter[{filter_name}]={filter_value}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_metadata_filters(self):
        """Metadata with a rotating metadata filter."""
        name = "/resources/metadata?filter[resource_filter]"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )
        endpoint = (
            f"/resources/metadata?filter[{filter_name}]={filter_value}"
            f"&filter[updated_at]={TARGET_INSERTED_AT}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_metadata_filters_scan_large(self):
        """Metadata with a rotating filter, scoped to the large scan."""
        name = "/resources/metadata?filter[resource_filter]&filter[scan_id] - 500k"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )
        endpoint = (
            f"/resources/metadata?filter[{filter_name}]={filter_value}"
            f"&filter[scan]={self.l_scan_id}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(2)
    def resourcess_filter_large_scan_include(self):
        """Large-scan listing with a rotating filter and provider include.

        NOTE(review): method name has a typo ("resourcess"); harmless for
        locust task discovery, but consider renaming in a follow-up.
        """
        name = "/resources?filter[resource_filter][scan]&include - 500k"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )
        endpoint = (
            f"/resources?filter[{filter_name}]={filter_value}"
            f"&filter[scan]={self.l_scan_id}"
            f"&include=provider"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_latest_default_ui_fields(self):
        """Latest-resources listing with the UI's sparse fieldset."""
        name = "/resources/latest?fields"
        page_number = self._next_page(name)
        endpoint = (
            f"/resources/latest?page[number]={page_number}"
            f"&fields[resources]={','.join(RESOURCES_UI_FIELDS)}"
        )
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

    @task(3)
    def resources_latest_metadata_filters(self):
        """Latest-resources metadata with a rotating filter."""
        name = "/resources/metadata/latest?filter[resource_filter]"
        filter_name, filter_value = get_next_resource_filter(
            self.available_resource_filters
        )
        endpoint = f"/resources/metadata/latest?filter[{filter_name}]={filter_value}"
        self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

View File

@@ -13,6 +13,23 @@ FINDINGS_RESOURCE_METADATA = {
"resource_types": "resource_type",
"services": "service",
}
# Maps /resources metadata attribute names (plural, as returned by the API)
# to the singular filter parameter names accepted by the resources endpoints.
RESOURCE_METADATA = {
    "regions": "region",
    "types": "type",
    "services": "service",
}
# Sparse fieldset requested by the UI on /resources/latest
# (passed as fields[resources]=... to trim the JSON:API payload).
RESOURCES_UI_FIELDS = [
    "name",
    "failed_findings_count",
    "region",
    "service",
    "type",
    "provider",
    "inserted_at",
    "updated_at",
    "uid",
]
# Names of the seeded load-test providers, sized by finding count.
S_PROVIDER_NAME = "provider-50k"
M_PROVIDER_NAME = "provider-250k"

View File

@@ -7,6 +7,7 @@ from locust import HttpUser, between
from utils.config import (
BASE_HEADERS,
FINDINGS_RESOURCE_METADATA,
RESOURCE_METADATA,
TARGET_INSERTED_AT,
USER_EMAIL,
USER_PASSWORD,
@@ -121,13 +122,16 @@ def get_scan_id_from_provider_name(host: str, token: str, provider_name: str) ->
return response.json()["data"][0]["id"]
def get_resource_filters_pairs(host: str, token: str, scan_id: str = "") -> dict:
def get_dynamic_filters_pairs(
host: str, token: str, endpoint: str, scan_id: str = ""
) -> dict:
"""
Retrieves and maps resource metadata filter values from the findings endpoint.
Retrieves and maps metadata filter values from a given endpoint.
Args:
host (str): The host URL of the API.
token (str): Bearer token for authentication.
endpoint (str): The API endpoint to query for metadata.
scan_id (str, optional): Optional scan ID to filter metadata. Defaults to using inserted_at timestamp.
Returns:
@@ -136,22 +140,28 @@ def get_resource_filters_pairs(host: str, token: str, scan_id: str = "") -> dict
Raises:
AssertionError: If the request fails or does not return a 200 status code.
"""
metadata_mapping = (
FINDINGS_RESOURCE_METADATA if endpoint == "findings" else RESOURCE_METADATA
)
date_filter = "inserted_at" if endpoint == "findings" else "updated_at"
metadata_filters = (
f"filter[scan]={scan_id}"
if scan_id
else f"filter[inserted_at]={TARGET_INSERTED_AT}"
else f"filter[{date_filter}]={TARGET_INSERTED_AT}"
)
response = requests.get(
f"{host}/findings/metadata?{metadata_filters}", headers=get_auth_headers(token)
f"{host}/{endpoint}/metadata?{metadata_filters}",
headers=get_auth_headers(token),
)
assert (
response.status_code == 200
), f"Failed to get resource filters values: {response.text}"
attributes = response.json()["data"]["attributes"]
return {
FINDINGS_RESOURCE_METADATA[key]: values
metadata_mapping[key]: values
for key, values in attributes.items()
if key in FINDINGS_RESOURCE_METADATA.keys()
if key in metadata_mapping.keys()
}

View File

@@ -23,6 +23,7 @@ import argparse
import json
import os
import re
import shlex
import signal
import socket
import subprocess
@@ -145,11 +146,11 @@ def _get_script_arguments():
def _run_prowler(prowler_args):
_debug("Running prowler with args: {0}".format(prowler_args), 1)
_prowler_command = "{prowler}/prowler {args}".format(
prowler=PATH_TO_PROWLER, args=prowler_args
_prowler_command = shlex.split(
"{prowler}/prowler {args}".format(prowler=PATH_TO_PROWLER, args=prowler_args)
)
_debug("Running command: {0}".format(_prowler_command), 2)
_process = subprocess.Popen(_prowler_command, stdout=subprocess.PIPE, shell=True)
_debug("Running command: {0}".format(" ".join(_prowler_command)), 2)
_process = subprocess.Popen(_prowler_command, stdout=subprocess.PIPE)
_output, _error = _process.communicate()
_debug("Raw prowler output: {0}".format(_output), 3)
_debug("Raw prowler error: {0}".format(_error), 3)

View File

@@ -109,13 +109,12 @@ Prowler will follow the same credentials search as [Google authentication librar
Prowler for Google Cloud needs the following permissions to be set:
- **Viewer (`roles/viewer`) IAM role**: granted at the project / folder / org level in order to scan the target projects
- **Reader (`roles/reader`) IAM role**: granted at the project / folder / org level in order to scan the target projects
- **Project level settings**: you need to have at least one project with the below settings:
- Identity and Access Management (IAM) API (`iam.googleapis.com`) enabled by either using the
[Google Cloud API UI](https://console.cloud.google.com/apis/api/iam.googleapis.com/metrics) or
by using the gcloud CLI `gcloud services enable iam.googleapis.com --project <your-project-id>` command
- Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`) IAM role
- Set the quota project to be this project by either running `gcloud auth application-default set-quota-project <project-id>` or by setting an environment variable:
`export GOOGLE_CLOUD_QUOTA_PROJECT=<project-id>`

BIN
docs/img/mutelist-ui-1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 321 KiB

BIN
docs/img/mutelist-ui-2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 276 KiB

BIN
docs/img/mutelist-ui-3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 326 KiB

BIN
docs/img/mutelist-ui-4.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 260 KiB

BIN
docs/img/mutelist-ui-5.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 269 KiB

BIN
docs/img/mutelist-ui-6.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 234 KiB

BIN
docs/img/mutelist-ui-7.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 273 KiB

BIN
docs/img/mutelist-ui-8.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 243 KiB

BIN
docs/img/mutelist-ui-9.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 649 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 121 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 351 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 139 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 144 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 119 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 103 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 117 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 117 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 359 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 354 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 261 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

View File

@@ -312,6 +312,51 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
prowler azure --az-cli-auth
```
### Prowler App Update
You have two options to upgrade your Prowler App installation:
#### Option 1: Change env file with the following values
Edit your `.env` file and change the version values:
```env
PROWLER_UI_VERSION="5.9.0"
PROWLER_API_VERSION="5.9.0"
```
#### Option 2: Run the following command
```bash
docker compose pull --policy always
```
The `--policy always` flag ensures that Docker pulls the latest images even if they already exist locally.
???+ note "What Gets Preserved During Upgrade"
Everything is preserved, nothing will be deleted after the update.
#### Troubleshooting
If containers don't start, check logs for errors:
```bash
# Check logs for errors
docker compose logs
# Verify image versions
docker images | grep prowler
```
If you encounter issues, you can rollback to the previous version by changing the `.env` file back to your previous version and running:
```bash
docker compose pull
docker compose up -d
```
## Prowler container versions
The available versions of Prowler CLI are the following:

View File

@@ -51,7 +51,7 @@ Prowler follows the same search order as [Google authentication libraries](https
???+ note
The credentials must belong to a user or service account with the necessary permissions.
To ensure full access, assign the roles/viewer IAM role to the identity being used.
To ensure full access, assign the roles/reader IAM role to the identity being used.
???+ note
Prowler will use the enabled Google Cloud APIs to get the information needed to perform the checks.
@@ -63,13 +63,12 @@ Prowler follows the same search order as [Google authentication libraries](https
Prowler for Google Cloud needs the following permissions to be set:
- **Viewer (`roles/viewer`) IAM role**: granted at the project / folder / org level in order to scan the target projects
- **Reader (`roles/reader`) IAM role**: granted at the project / folder / org level in order to scan the target projects
- **Project level settings**: you need to have at least one project with the below settings:
- Identity and Access Management (IAM) API (`iam.googleapis.com`) enabled by either using the
[Google Cloud API UI](https://console.cloud.google.com/apis/api/iam.googleapis.com/metrics) or
by using the gcloud CLI `gcloud services enable iam.googleapis.com --project <your-project-id>` command
- Service Usage Consumer (`roles/serviceusage.serviceUsageConsumer`) IAM role
- Set the quota project to be this project by either running `gcloud auth application-default set-quota-project <project-id>` or by setting an environment variable:
`export GOOGLE_CLOUD_QUOTA_PROJECT=<project-id>`

View File

@@ -9,7 +9,7 @@ prowler gcp --organization-id organization-id
```
???+ warning
Make sure that the used credentials have the role Cloud Asset Viewer (`roles/cloudasset.viewer`) or Cloud Asset Owner (`roles/cloudasset.owner`) on the organization level.
Make sure that the used credentials have a role with the `cloudasset.assets.listResource` permission on the organization level like `roles/cloudasset.viewer` (Cloud Asset Viewer) or `roles/cloudasset.owner` (Cloud Asset Owner).
???+ note
With this option, Prowler retrieves all projects within the specified organization, including those organized in folders and nested subfolders. This ensures that every project under the organization's hierarchy is scanned, providing full visibility across the entire organization.

View File

@@ -0,0 +1,59 @@
# Mute Findings (Mutelist)
Prowler App allows users to mute specific findings to focus on the most critical security issues. This comprehensive guide demonstrates how to effectively use the Mutelist feature to manage and prioritize security findings.
## What Is the Mutelist Feature?
The Mutelist feature enables users to:
- **Suppress specific findings** from appearing in future scans
- **Focus on critical issues** by hiding resolved or accepted risks
- **Maintain audit trails** of muted findings for compliance purposes
- **Streamline security workflows** by reducing noise from non-critical findings
## Prerequisites
Before muting findings, ensure:
- Valid access to Prowler App with appropriate permissions
- A provider added to the Prowler App
- Understanding of the security implications of muting specific findings
???+ warning
Muting findings does not resolve underlying security issues. Review each finding carefully before muting to ensure it represents an acceptable risk or has been properly addressed.
## Step 1: Add a provider
To configure Mutelist:
1. Log into Prowler App
2. Navigate to the providers page
![Add provider](../img/mutelist-ui-1.png)
3. Add a provider; the "Configure Muted Findings" button will then be enabled on both the providers page and the scans page
![Button enabled in providers page](../img/mutelist-ui-2.png)
![Button enabled in scans pages](../img/mutelist-ui-3.png)
## Step 2: Configure Mutelist
1. Open the modal by clicking "Configure Muted Findings" button
![Open modal](../img/mutelist-ui-4.png)
1. Provide a valid Mutelist in `YAML` format. More details about Mutelist [here](../tutorials/mutelist.md)
![Valid YAML configuration](../img/mutelist-ui-5.png)
If the YAML configuration is invalid, an error message will be displayed
![Wrong YAML configuration](../img/mutelist-ui-7.png)
![Wrong YAML configuration 2](../img/mutelist-ui-8.png)
## Step 3: Review the Mutelist
1. Once added, the configuration can be removed or updated
![Remove or update configuration](../img/mutelist-ui-6.png)
## Step 4: Check muted findings in the scan results
1. Run a new scan
2. Check the muted findings in the scan results
![Check muted findings](../img/mutelist-ui-9.png)
???+ note
The Mutelist configuration takes effect on the next scans.

View File

@@ -0,0 +1,43 @@
# Entra ID Configuration
This page provides instructions for creating and configuring a Microsoft Entra ID (formerly Azure AD) application to use SAML SSO with Prowler App.
## Creating and Configuring the Enterprise Application
1. From the "Enterprise Applications" page in the Azure Portal, click "+ New application".
![New application](../img/saml/saml-sso-azure-1.png)
2. At the top of the page, click "+ Create your own application".
![Create application](../img/saml/saml-sso-azure-2.png)
3. Enter a name for the application and select the "Integrate any other application you don't find in the gallery (Non-gallery)" option.
![Enter name](../img/saml/saml-sso-azure-3.png)
4. Assign users and groups to the application, then proceed to "Set up single sign on" and select "SAML" as the method.
![Select SAML](../img/saml/saml-sso-azure-4.png)
5. In the "Basic SAML Configuration" section, click "Edit".
![Edit](../img/saml/saml-sso-azure-5.png)
6. Enter the "Identifier (Entity ID)" and "Reply URL (Assertion Consumer Service URL)". These values can be obtained from the SAML SSO integration setup in Prowler App. For detailed instructions, refer to the [SAML SSO Configuration](./prowler-app-sso.md) page.
![Enter data](../img/saml/saml-sso-azure-6.png)
7. In the "SAML Certificates" section, click "Edit".
![Edit](../img/saml/saml-sso-azure-7.png)
8. For the "Signing Option," select "Sign SAML response and assertion", and then click "Save".
![Signing options](../img/saml/saml-sso-azure-8.png)
9. Once the changes are saved, the metadata XML can be downloaded from the "App Federation Metadata Url".
![Metadata XML](../img/saml/saml-sso-azure-9.png)
10. Save the downloaded Metadata XML to a file. To complete the setup, upload this file during the Prowler App integration. (See the [SAML SSO Configuration](./prowler-app-sso.md) page for details).

View File

@@ -1,175 +1,203 @@
# Configuring SAML Single Sign-On (SSO) in Prowler
# SAML Single Sign-On (SSO) Configuration
This guide explains how to enable and test SAML SSO integration in Prowler. It includes environment setup, API endpoints, and how to configure Okta as your Identity Provider (IdP).
This guide provides comprehensive instructions to configure SAML-based Single Sign-On (SSO) in Prowler App. This configuration allows users to authenticate using the organization's Identity Provider (IdP).
This document is divided into two main sections:
- **User Guide**: For organization administrators to configure SAML SSO through Prowler App.
- **Developer and Administrator Guide**: For developers and system administrators running self-hosted Prowler App instances, providing technical details on environment configuration, API usage, and testing.
---
## Environment Configuration
## User Guide: Configuring SAML SSO in Prowler App
### `DJANGO_ALLOWED_HOSTS`
Follow these steps to enable and configure SAML SSO for an organization.
Update this variable to specify which domains Django should accept incoming requests from. This typically includes:
### Key Features
- `localhost` for local development
- container hostnames (e.g. `prowler-api`)
- public-facing domains or tunnels (e.g. ngrok)
Prowler can be integrated with SAML SSO identity providers such as Okta to enable single sign-on for the organization's users. The Prowler SAML integration currently supports the following features:
**Example**:
- **IdP-Initiated SSO**: Users can initiate login from their Identity Provider's dashboard.
- **SP-Initiated SSO**: Users can initiate login directly from the Prowler login page.
- **Just-in-Time Provisioning**: Users from the organization signing into Prowler for the first time will be automatically created.
```env
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api,mycompany.prowler
```
???+ warning "Deactivate SAML"
If the SAML configuration is removed, users who previously authenticated via SAML will need to reset their password to regain access using standard login. This is because their accounts no longer have valid authentication credentials without the SAML integration.
# SAML Configuration API
### Prerequisites
You can manage SAML settings via the API. Prowler provides full CRUD support for tenant-specific SAML configuration.
- Administrator access to the Prowler organization is required.
- Administrative access to the SAML 2.0 compliant Identity Provider (e.g., Okta, Azure AD, Google Workspace) is necessary.
- GET /api/v1/saml-config: Retrieve the current configuration
### Configuration Steps
- POST /api/v1/saml-config: Create a new configuration
#### Step 1: Access Profile Settings
- PATCH /api/v1/saml-config: Update the existing configuration
To access the account settings, click the "Account" button in the top-right corner of Prowler App, or navigate directly to `https://cloud.prowler.com/profile` (or `http://localhost:3000/profile` for local setups).
- DELETE /api/v1/saml-config: Remove the current configuration
![Access Profile Settings](../img/saml/saml-step-1.png)
#### Step 2: Enable SAML Integration
???+ note "API Note"
SSO with SAML API documentation.[Prowler API Reference - Upload SAML configuration](https://api.prowler.com/api/v1/docs#tag/SAML/operation/saml_config_create)
On the profile page, find the "SAML SSO Integration" card and click "Enable" to begin the configuration process.
# SAML Initiate
![Enable SAML Integration](../img/saml/saml-step-2.png)
### Description
#### Step 3: Configure the Identity Provider (IdP)
This endpoint receives an email and checks if there is an active SAML configuration for the associated domain (i.e., the part after the @). If a configuration exists it responds with an HTTP 302 redirect to the appropriate saml_login endpoint for the organization.
The Prowler SAML configuration panel displays the information needed to configure the IdP. This information must be used to create a new SAML application in the IdP.
- POST /api/v1/accounts/saml/initiate/
1. **Assertion Consumer Service (ACS) URL**: The endpoint in Prowler that will receive the SAML assertion from the IdP.
2. **Audience URI (Entity ID)**: A unique identifier for the Prowler application (Service Provider).
???+ note
Important: This endpoint is intended to be used from a browser, as it returns a 302 redirect that needs to be followed to continue the SAML authentication flow. For testing purposes, it is better to use a browser or a tool that follows redirects (such as Postman) rather than relying on unit tests that cannot capture the redirect behavior.
To configure the IdP, copy the **ACS URL** and **Audience URI** from Prowler and use them to set up a new SAML application.
### Expected payload
```
{
"email_domain": "user@domain.com"
}
```
![IdP configuration](../img/saml/idp_config.png)
### Possible responses
???+ info "IdP Configuration"
The exact steps for configuring an IdP vary depending on the provider (Okta, Azure AD, etc.). Please refer to the IdP's documentation for instructions on creating a SAML application. For SSO integration with Azure AD / Entra ID, see our [Entra ID configuration instructions](./prowler-app-sso-entra.md).
• 302 FOUND: Redirects to the SAML login URL associated with the organization.
#### Step 4: Configure Attribute Mapping in the IdP
• 403 FORBIDDEN: The domain is not authorized.
For Prowler to correctly identify and provision users, the IdP must be configured to send the following attributes in the SAML assertion:
### Validation logic
| Attribute Name | Description | Required |
|----------------|---------------------------------------------------------------------------------------------------------|----------|
| `firstName` | The user's first name. | Yes |
| `lastName` | The user's last name. | Yes |
| `userType` | The Prowler role to be assigned to the user (e.g., `admin`, `auditor`). If a role with that name already exists, it will be used; otherwise, a new role called `no_permissions` will be created with minimal permissions. You can then edit the permissions for that role in the [RBAC Management tab](./prowler-app-rbac.md). | No |
| `companyName` | The user's company name. This is automatically populated if the IdP sends an `organization` attribute. | No |
• Looks up the domain in SAMLDomainIndex.
???+ info "IdP Attribute Mapping"
Note that the attribute name is just an example and may be different in your IdP. For instance, if your IdP provides a 'division' attribute, you can map it to 'userType'.
![IdP configuration](../img/saml/saml_attribute_statements.png)
• Retrieves the related SAMLConfiguration object via tenant_id.
???+ warning "Dynamic Updates"
These attributes are updated in Prowler each time a user logs in. Any changes made in the identity provider (IdP) will be reflected the next time the user logs in again.
#### Step 5: Upload IdP Metadata to Prowler
# SAML Integration: Testing Guide
Once the IdP is configured, it provides a **metadata XML file**. This file contains the IdP's configuration information, such as its public key and login URL.
This document outlines the process for testing the SAML integration functionality.
To complete the Prowler-side configuration:
1. Return to the Prowler SAML configuration page.
2. Enter the **email domain** for the organization (e.g., `mycompany.com`). Prowler uses this to identify users who should authenticate via SAML.
3. Upload the **metadata XML file** downloaded from the IdP.
![Configure Prowler with IdP Metadata](../img/saml/saml-step-3.png)
#### Step 6: Save and Verify Configuration
Click the "Save" button to complete the setup. The "SAML Integration" card will now show an "Active" status, indicating that the configuration is complete and enabled.
![Verify Integration Status](../img/saml/saml-step-4.png)
???+ info "IdP Configuration"
The exact steps for configuring an IdP vary depending on the provider (Okta, Azure AD, etc.). Please refer to the IdP's documentation for instructions on creating a SAML application.
##### Remove SAML Configuration
You can disable SAML SSO by removing the existing configuration from the integration panel.
![Remove SAML configuration](../img/saml/saml-step-remove.png)
### Signing in with SAML SSO
Once SAML SSO is enabled, users from the configured domain can sign in by entering their email address on the login page and clicking "Continue with SAML SSO". They will be redirected to the IdP to authenticate and then returned to Prowler.
![Sign in with SAML SSO](../img/saml/saml-step-5.png)
---
## 1. Start Ngrok and Update ALLOWED_HOSTS
## Developer and Administrator Guide
Start ngrok on port 8080:
```
This section provides technical details for developers and administrators of self-hosted Prowler instances.
### Environment Configuration
For self-hosted deployments, several environment variables must be configured to ensure SAML SSO functions correctly. These variables are typically set in an `.env` file.
| Variable | Description | Example |
|---------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------|
| `API_BASE_URL` | The base URL of the Prowler API instance. | `http://mycompany.prowler/api/v1` |
| `DJANGO_ALLOWED_HOSTS` | A comma-separated list of hostnames that the Django backend will accept requests from. Include any domains used to access the Prowler API. | `localhost,127.0.0.1,prowler-api,mycompany.prowler` |
| `AUTH_URL` | The base URL of the Prowler web UI. This is used to construct the callback URL after authentication. | `http://mycompany.prowler` |
| `SAML_SSO_CALLBACK_URL` | The full callback URL where users are redirected after authenticating with the IdP. It is typically constructed using the `AUTH_URL`. | `${AUTH_URL}/api/auth/callback/saml` |
After modifying these variables, the Prowler API must be restarted for the changes to take effect.
### SAML API Reference
Prowler provides a REST API to manage SAML configurations programmatically.
- **Endpoint**: `/api/v1/saml-config`
- **Methods**:
- `GET`: Retrieve the current SAML configuration for the tenant.
- `POST`: Create a new SAML configuration.
- `PATCH`: Update an existing SAML configuration.
- `DELETE`: Remove the SAML configuration.
???+ note "API Documentation"
For detailed information on using the API, refer to the [Prowler API Reference](https://api.prowler.com/api/v1/docs#tag/SAML/operation/saml_config_create).
#### SAML Initiate Endpoint
- **Endpoint**: `POST /api/v1/accounts/saml/initiate/`
- **Description**: This endpoint initiates the SAML login flow. It takes an email address, determines if the domain has a SAML configuration, and redirects the user to the appropriate IdP login page. It is primarily designed for browser-based flows.
### Testing SAML Integration
Follow these steps to test a SAML integration in a development environment.
#### 1. Expose the Local Environment
Since the IdP needs to send requests to the local Prowler instance, it must be exposed to the internet. A tool like `ngrok` can be used for this purpose.
To start ngrok, run the following command:
```bash
ngrok http 8080
```
This command provides a public URL (e.g., `https://<random-string>.ngrok.io`) that forwards to the local server on port 8080.
Then, copy the generated ngrok URL and include it in the ALLOWED_HOSTS setting. If you're using the development environment, it usually defaults to *, but in some cases this may not work properly, like in my tests (investigate):
#### 2. Update `DJANGO_ALLOWED_HOSTS`
```
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["*"])
To allow requests from ngrok, add its URL to the `DJANGO_ALLOWED_HOSTS` environment variable.
```env
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api,*.ngrok.io
```
## 2. Configure the Identity Provider (IdP)
#### 3. Configure the IdP
Start your environment and configure your IdP. You will need to download the IdP's metadata XML file.
When configuring the IdP for testing, use the ngrok URL for the ACS URL:
`https://<your-ngrok-url>/api/v1/accounts/saml/<YOUR_DOMAIN>/acs/`
Your Assertion Consumer Service (ACS) URL must follow this format:
#### 4. Configure Prowler via API
```
https://<PROXY_URL>/api/v1/accounts/saml/<CONFIGURED_DOMAIN>/acs/
```
## 3. IdP Attribute Mapping
The following fields are expected from the IdP:
- firstName
- lastName
- userType (this is the name of the role the user should be assigned)
- companyName (this is filled automatically if the IdP includes an "organization" field)
These values are dynamic. If the values change in the IdP, they will be updated on the next login.
## 4. SAML Configuration API (POST)
SAML configuration is managed via a CRUD API. Use the following POST request to create a new configuration:
To create a SAML configuration for testing, use `curl`. Make sure to replace placeholders with actual data.
```bash
curl --location 'http://localhost:8080/api/v1/saml-config' \
--header 'Content-Type: application/vnd.api+json' \
--header 'Accept: application/vnd.api+json' \
--header 'Authorization: Bearer <TOKEN>' \
--header 'Authorization: Bearer <YOUR_API_TOKEN>' \
--data '{
"data": {
"type": "saml-configurations",
"attributes": {
"email_domain": "prowler.com",
"metadata_xml": "<XML>"
"email_domain": "yourdomain.com",
"metadata_xml": "<PASTE_YOUR_IDP_METADATA_XML_HERE>"
}
}
}'
```
## 5. SAML SSO Callback Configuration
#### 5. Initiate Login Flow
### Environment Variable Configuration
To test the end-to-end flow, construct the login URL and open it in a browser. This will start the IdP-initiated login flow.
The SAML authentication flow requires proper callback URL configuration to handle post-authentication redirects. Configure the following environment variables:
`https://<your-ngrok-url>/api/v1/accounts/saml/<YOUR_DOMAIN>/login/`
#### `SAML_SSO_CALLBACK_URL`
Specifies the callback endpoint that will be invoked upon successful SAML authentication completion. This URL directs users back to the web application interface.
```env
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"
```
#### `AUTH_URL`
Defines the base URL of the web user interface application that serves as the authentication callback destination.
```env
AUTH_URL="<WEB_UI_URL>"
```
### Configuration Notes
- The `SAML_SSO_CALLBACK_URL` dynamically references the `AUTH_URL` variable to construct the complete callback endpoint
- Ensure the `AUTH_URL` points to the correct web UI deployment (development, staging, or production)
- The callback endpoint `/api/auth/callback/saml` must be accessible and properly configured to handle SAML authentication responses
- Both environment variables are required for proper SAML SSO functionality
- Verify that the `NEXT_PUBLIC_API_BASE_URL` environment variable is properly configured to reference the correct API server base URL corresponding to your target deployment environment. This ensures proper routing of SAML callback requests to the appropriate backend services.
## 6. Start SAML Login Flow
Once everything is configured, start the SAML login process by visiting the following URL:
```
https://<PROXY_IP>/api/v1/accounts/saml/<CONFIGURED_DOMAIN>/login/?email=<USER_EMAIL>
```
At the end you will get a valid access and refresh token
## 7. Notes on the initiate Endpoint
The initiate endpoint is not strictly required. It was created to allow extra checks or behavior modifications (like enumeration mitigation). It also simplifies UI integration with SAML, but again, it's optional.
If successful, the user will be redirected back to the Prowler application with a valid session.

View File

@@ -54,6 +54,7 @@ nav:
- Role-Based Access Control: tutorials/prowler-app-rbac.md
- Social Login: tutorials/prowler-app-social-login.md
- SSO with SAML: tutorials/prowler-app-sso.md
- Mute findings: tutorials/prowler-app-mute-findings.md
- Lighthouse: tutorials/prowler-app-lighthouse.md
- CLI:
- Miscellaneous: tutorials/misc.md

View File

@@ -2,6 +2,34 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [v5.10.0] (Prowler UNRELEASED)
### Added
- `bedrock_api_key_no_administrative_privileges` check for AWS provider [(#8321)](https://github.com/prowler-cloud/prowler/pull/8321)
- Support App Key Content in GitHub provider [(#8271)](https://github.com/prowler-cloud/prowler/pull/8271)
### Changed
- Handle some AWS errors as warnings instead of errors [(#8347)](https://github.com/prowler-cloud/prowler/pull/8347)
### Fixed
- False positives in SQS encryption check for ephemeral queues [(#8330)](https://github.com/prowler-cloud/prowler/pull/8330)
---
## [v5.9.3] (Prowler UNRELEASED)
### Fixed
- Add more validations to Azure Storage models when some values are None to avoid serialization issues [(#8325)](https://github.com/prowler-cloud/prowler/pull/8325)
---
## [v5.9.2] (Prowler v5.9.2)
### Fixed
- Use the correct resource name in `defender_domain_dkim_enabled` check [(#8334)](https://github.com/prowler-cloud/prowler/pull/8334)
---
## [v5.9.0] (Prowler v5.9.0)
### Added
@@ -32,12 +60,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Update `entra_users_mfa_capable` check to use the correct resource name and ID [(#8288)](https://github.com/prowler-cloud/prowler/pull/8288)
- Handle multiple services and severities while listing checks [(#8302)](https://github.com/prowler-cloud/prowler/pull/8302)
- Handle `tenant_id` for M365 Mutelist [(#8306)](https://github.com/prowler-cloud/prowler/pull/8306)
---
## [v5.8.2] (Prowler 5.8.2)
### Fixed
- Fix error in Dashboard Overview page when reading CSV files [(#8257)](https://github.com/prowler-cloud/prowler/pull/8257)
---

View File

@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.9.0"
prowler_version = "5.10.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -353,6 +353,8 @@ class Finding(BaseModel):
finding.region = resource.region
# Azure, GCP specified field
finding.location = resource.region
# GitHub specified field
finding.owner = resource.region
# K8s specified field
if provider.type == "kubernetes":
finding.namespace = resource.region.removeprefix("namespace: ")

View File

@@ -1417,6 +1417,11 @@
"bedrock-data-automation": {
"regions": {
"aws": [
"ap-south-1",
"ap-southeast-2",
"eu-central-1",
"eu-west-1",
"eu-west-2",
"us-east-1",
"us-west-2"
],
@@ -2503,6 +2508,7 @@
"il-central-1",
"me-central-1",
"me-south-1",
"mx-central-1",
"sa-east-1",
"us-east-1",
"us-east-2",
@@ -2544,6 +2550,7 @@
"il-central-1",
"me-central-1",
"me-south-1",
"mx-central-1",
"sa-east-1",
"us-east-1",
"us-east-2",
@@ -2587,6 +2594,7 @@
"il-central-1",
"me-central-1",
"me-south-1",
"mx-central-1",
"sa-east-1",
"us-east-1",
"us-east-2",
@@ -5075,6 +5083,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -5088,6 +5097,7 @@
"il-central-1",
"me-central-1",
"me-south-1",
"mx-central-1",
"sa-east-1",
"us-east-1",
"us-east-2",
@@ -5994,6 +6004,7 @@
"aws": [
"af-south-1",
"ap-east-1",
"ap-east-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
@@ -7396,6 +7407,8 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ap-southeast-7",
"ca-central-1",
"eu-central-1",
"eu-central-2",
@@ -7492,6 +7505,7 @@
"aws": [
"af-south-1",
"ap-east-1",
"ap-east-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
@@ -8181,6 +8195,7 @@
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ap-southeast-7",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9540,7 +9555,9 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
@@ -10090,6 +10107,7 @@
"aws": [
"af-south-1",
"ap-east-1",
"ap-east-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",

View File

@@ -0,0 +1,36 @@
{
"Provider": "aws",
"CheckID": "bedrock_api_key_no_administrative_privileges",
"CheckTitle": "Ensure Amazon Bedrock API keys do not have administrative privileges or privilege escalation",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards"
],
"ServiceName": "bedrock",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:iam:region:account-id:user/{user-name}/credential/{api-key-id}",
"Severity": "high",
"ResourceType": "AwsIamServiceSpecificCredential",
"Description": "Ensure that Amazon Bedrock API keys do not have administrative privileges or privilege escalation capabilities. API keys with administrative privileges can perform any action on any resource in your AWS environment, while privilege escalation allows users to grant themselves additional permissions, both posing significant security risks.",
"Risk": "Amazon Bedrock API keys with administrative privileges can perform any action on any resource in your AWS environment. Privilege escalation capabilities allow users to grant themselves additional permissions beyond their intended scope. Both violations of the principle of least privilege can lead to security vulnerabilities, data leaks, data loss, or unexpected charges if the API key is compromised or misused.",
"RelatedUrl": "https://docs.aws.amazon.com/bedrock/latest/userguide/api-keys.html",
"Remediation": {
"Code": {
"CLI": "aws iam delete-service-specific-credential --user-name <username> --service-specific-credential-id <credential-id>",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Apply the principle of least privilege to Amazon Bedrock API keys. Instead of granting administrative privileges or privilege escalation capabilities, assign only the permissions necessary for specific tasks. Create custom IAM policies with minimal permissions based on the principle of least privilege. Regularly review and audit API key permissions to ensure they cannot be used for privilege escalation.",
"Url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege"
}
},
"Categories": [
"gen-ai",
"trustboundaries"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": "This check verifies that Amazon Bedrock API keys do not have administrative privileges or privilege escalation capabilities through attached IAM policies or inline policies. It follows the principle of least privilege to ensure API keys only have the minimum necessary permissions and cannot be used to escalate privileges."
}

View File

@@ -0,0 +1,57 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.iam.iam_client import iam_client
from prowler.providers.aws.services.iam.lib.policy import (
check_admin_access,
check_full_service_access,
)
from prowler.providers.aws.services.iam.lib.privilege_escalation import (
check_privilege_escalation,
)
class bedrock_api_key_no_administrative_privileges(Check):
    """Ensure Amazon Bedrock API keys have no administrative privileges.

    A Bedrock API key is an IAM service-specific credential scoped to
    ``bedrock.amazonaws.com``. The check FAILs when any policy reachable from
    the owning user (attached managed policies or inline policies) grants
    administrative access, privilege escalation, or full ``bedrock`` service
    access; otherwise it PASSes.
    """

    def execute(self):
        """Return one Check_Report_AWS per Bedrock API key."""
        findings = []
        for api_key in iam_client.service_specific_credentials:
            # Only Bedrock API keys are in scope for this check.
            if api_key.service_name != "bedrock.amazonaws.com":
                continue
            report = Check_Report_AWS(metadata=self.metadata(), resource=api_key)
            report.status = "PASS"
            report.status_extended = f"API key {api_key.id} in user {api_key.user.name} has no administrative privileges."
            # First violation wins. (Fixes the original behavior where the
            # inline-policy loop could overwrite a FAIL message already set by
            # an attached policy.)
            violation = self._find_violation(api_key)
            if violation:
                report.status = "FAIL"
                report.status_extended = (
                    f"API key {api_key.id} in user {api_key.user.name} {violation}."
                )
            findings.append(report)
        return findings

    @staticmethod
    def _violation_reason(policy_document):
        """Return the violation phrase for a policy document, or None if clean."""
        if check_admin_access(policy_document):
            return "has administrative privileges through"
        if check_privilege_escalation(policy_document):
            return "has privilege escalation through"
        if check_full_service_access("bedrock", policy_document):
            return "has full service access through"
        return None

    def _find_violation(self, api_key):
        """Scan attached then inline policies; return the first violation text or None."""
        for policy in api_key.user.attached_policies:
            policy_arn = policy["PolicyArn"]
            if policy_arn in iam_client.policies:
                policy_document = iam_client.policies[policy_arn].document
                if policy_document:
                    reason = self._violation_reason(policy_document)
                    if reason:
                        return f"{reason} attached policy {policy['PolicyName']}"
        for inline_policy_name in api_key.user.inline_policies:
            # Inline policies are keyed in iam_client.policies by a synthesized ARN.
            inline_policy_arn = f"{api_key.user.arn}:policy/{inline_policy_name}"
            if inline_policy_arn in iam_client.policies:
                policy_document = iam_client.policies[inline_policy_arn].document
                if policy_document:
                    reason = self._violation_reason(policy_document)
                    if reason:
                        return f"{reason} inline policy {inline_policy_name}"
        return None

View File

@@ -1,5 +1,6 @@
from typing import Optional
from botocore.client import ClientError
from pydantic.v1 import BaseModel
from prowler.lib.logger import logger
@@ -71,6 +72,17 @@ class ElasticBeanstalk(AWSService):
and option["OptionName"] == "StreamLogs"
):
environment.cloudwatch_stream_logs = option.get("Value", "false")
except ClientError as error:
if error.response["Error"]["Code"] in [
"InvalidParameterValue",
]:
logger.warning(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -84,10 +96,17 @@ class ElasticBeanstalk(AWSService):
"ResourceTags"
]
resource.tags = response
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except ClientError as error:
if error.response["Error"]["Code"] in [
"ResourceNotFoundException",
]:
logger.warning(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -6,7 +6,7 @@ from prowler.providers.aws.services.iam.lib.policy import check_admin_access
class iam_aws_attached_policy_no_administrative_privileges(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
# Check only for attached AWS policies
if policy.attached and policy.type == "AWS":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)

View File

@@ -6,7 +6,7 @@ from prowler.providers.aws.services.iam.lib.policy import check_admin_access
class iam_customer_attached_policy_no_administrative_privileges(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
# Check only for attached custom policies
if policy.attached and policy.type == "Custom":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)

View File

@@ -6,7 +6,7 @@ from prowler.providers.aws.services.iam.lib.policy import check_admin_access
class iam_customer_unattached_policy_no_administrative_privileges(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
# Check only for customer unattached policies
if not policy.attached and policy.type == "Custom":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)

View File

@@ -9,7 +9,7 @@ class iam_inline_policy_allows_privilege_escalation(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
if policy.type == "Inline":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
report.resource_id = f"{policy.entity}/{policy.name}"

View File

@@ -6,7 +6,7 @@ from prowler.providers.aws.services.iam.lib.policy import check_admin_access
class iam_inline_policy_no_administrative_privileges(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
if policy.type == "Inline":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
report.region = iam_client.region

View File

@@ -9,7 +9,7 @@ class iam_inline_policy_no_full_access_to_cloudtrail(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
# Check only inline policies
if policy.type == "Inline":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)

View File

@@ -9,7 +9,7 @@ class iam_inline_policy_no_full_access_to_kms(Check):
def execute(self):
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
if policy.type == "Inline":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
report.region = iam_client.region

View File

@@ -13,7 +13,7 @@ class iam_no_custom_policy_permissive_role_assumption(Check):
return any("*" in r for r in resource)
return False
for policy in iam_client.policies:
for policy in iam_client.policies.values():
# Check only custom policies
if policy.type == "Custom":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)

View File

@@ -9,7 +9,7 @@ class iam_policy_allows_privilege_escalation(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
if policy.type == "Custom":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
report.region = iam_client.region

View File

@@ -8,7 +8,7 @@ critical_service = "cloudtrail"
class iam_policy_no_full_access_to_cloudtrail(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
# Check only custom policies
if policy.type == "Custom":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)

View File

@@ -8,7 +8,7 @@ critical_service = "kms"
class iam_policy_no_full_access_to_kms(Check):
def execute(self) -> Check_Report_AWS:
findings = []
for policy in iam_client.policies:
for policy in iam_client.policies.values():
# Check only custom policies
if policy.type == "Custom":
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)

View File

@@ -77,13 +77,15 @@ class IAM(AWSService):
cloudshell_admin_policy_arn
)
# List both Customer (attached and unattached) and AWS Managed (only attached) policies
self.policies = []
self.policies.extend(self._list_policies("AWS"))
self.policies.extend(self._list_policies("Local"))
self.policies = {}
self.policies.update(self._list_policies("AWS"))
self.policies.update(self._list_policies("Local"))
self._list_policies_version(self.policies)
self._list_inline_user_policies()
self._list_inline_group_policies()
self._list_inline_role_policies()
self.service_specific_credentials = []
self._list_service_specific_credentials()
self.saml_providers = self._list_saml_providers()
self.server_certificates = self._list_server_certificates()
self.access_keys_metadata = {}
@@ -99,7 +101,7 @@ class IAM(AWSService):
self.__threading_call__(self._list_tags, self.roles)
self.__threading_call__(
self._list_tags,
[policy for policy in self.policies if policy.type == "Custom"],
[policy for policy in self.policies.values() if policy.type == "Custom"],
)
self.__threading_call__(self._list_tags, self.server_certificates)
if self.saml_providers is not None:
@@ -514,16 +516,15 @@ class IAM(AWSService):
UserName=user.name, PolicyName=policy
)
inline_user_policy_doc = inline_policy["PolicyDocument"]
self.policies.append(
Policy(
name=policy,
arn=user.arn,
entity=user.name,
type="Inline",
attached=True,
version_id="v1",
document=inline_user_policy_doc,
)
inline_user_policy_arn = f"{user.arn}:policy/{policy}"
self.policies[inline_user_policy_arn] = Policy(
name=policy,
arn=user.arn,
entity=user.name,
type="Inline",
attached=True,
version_id="v1",
document=inline_user_policy_doc,
)
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntity":
@@ -572,16 +573,15 @@ class IAM(AWSService):
GroupName=group.name, PolicyName=policy
)
inline_group_policy_doc = inline_policy["PolicyDocument"]
self.policies.append(
Policy(
name=policy,
arn=group.arn,
entity=group.name,
type="Inline",
attached=True,
version_id="v1",
document=inline_group_policy_doc,
)
inline_group_policy_arn = f"{group.arn}:policy/{policy}"
self.policies[inline_group_policy_arn] = Policy(
name=policy,
arn=group.arn,
entity=group.name,
type="Inline",
attached=True,
version_id="v1",
document=inline_group_policy_doc,
)
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntity":
@@ -633,16 +633,15 @@ class IAM(AWSService):
RoleName=role.name, PolicyName=policy
)
inline_role_policy_doc = inline_policy["PolicyDocument"]
self.policies.append(
Policy(
name=policy,
arn=role.arn,
entity=role.name,
type="Inline",
attached=True,
version_id="v1",
document=inline_role_policy_doc,
)
inline_role_policy_arn = f"{role.arn}:policy/{policy}"
self.policies[inline_role_policy_arn] = Policy(
name=policy,
arn=role.arn,
entity=role.name,
type="Inline",
attached=True,
version_id="v1",
document=inline_role_policy_doc,
)
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntity":
@@ -742,7 +741,7 @@ class IAM(AWSService):
def _list_policies(self, scope):
logger.info("IAM - List Policies...")
try:
policies = []
policies = {}
list_policies_paginator = self.client.get_paginator("list_policies")
for page in list_policies_paginator.paginate(
Scope=scope, OnlyAttached=False if scope == "Local" else True
@@ -751,17 +750,13 @@ class IAM(AWSService):
if not self.audit_resources or (
is_resource_filtered(policy["Arn"], self.audit_resources)
):
policies.append(
Policy(
name=policy["PolicyName"],
arn=policy["Arn"],
entity=policy["PolicyId"],
version_id=policy["DefaultVersionId"],
type="Custom" if scope == "Local" else "AWS",
attached=(
True if policy["AttachmentCount"] > 0 else False
),
)
policies[policy["Arn"]] = Policy(
name=policy["PolicyName"],
arn=policy["Arn"],
entity=policy["PolicyId"],
version_id=policy["DefaultVersionId"],
type="Custom" if scope == "Local" else "AWS",
attached=(True if policy["AttachmentCount"] > 0 else False),
)
except Exception as error:
logger.error(
@@ -773,7 +768,7 @@ class IAM(AWSService):
def _list_policies_version(self, policies):
logger.info("IAM - List Policies Version...")
try:
for policy in policies:
for policy in policies.values():
try:
policy_version = self.client.get_policy_version(
PolicyArn=policy.arn, VersionId=policy.version_id
@@ -870,7 +865,10 @@ class IAM(AWSService):
SAMLProviderArn=resource.arn
).get("Tags", [])
except Exception as error:
if error.response["Error"]["Code"] == "NoSuchEntityException":
if error.response["Error"]["Code"] in [
"NoSuchEntity",
"NoSuchEntityException",
]:
logger.warning(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
@@ -1019,6 +1017,43 @@ class IAM(AWSService):
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
    def _list_service_specific_credentials(self):
        """Collect IAM service-specific credentials (e.g. Bedrock API keys) for every user.

        Populates ``self.service_specific_credentials`` with one
        ``ServiceSpecificCredential`` per credential, after applying the
        ``--resource-arn`` audit filter. Errors are logged, not raised.

        NOTE(review): this calls ``list_service_specific_credentials`` once per
        user without pagination — presumably the API returns all credentials in
        one page; confirm against the boto3 documentation.
        """
        logger.info("IAM - List Service Specific Credentials...")
        try:
            for user in self.users:
                service_specific_credentials = (
                    self.client.list_service_specific_credentials(UserName=user.name)
                )
                for credential in service_specific_credentials.get(
                    "ServiceSpecificCredentials", []
                ):
                    # The API response has no ARN, so synthesize one so the
                    # credential can be matched by the audit-resources filter.
                    # NOTE(review): real IAM ARNs leave the region field empty
                    # (IAM is global); this template embeds self.region — TODO
                    # confirm this is intentional for filtering purposes.
                    credential["Arn"] = (
                        f"arn:{self.audited_partition}:iam:{self.region}:{self.audited_account}:user/{user.name}/credential/{credential['ServiceSpecificCredentialId']}"
                    )
                    # Honor the --resource-arn filter when one was provided.
                    if not self.audit_resources or (
                        is_resource_filtered(credential["Arn"], self.audit_resources)
                    ):
                        self.service_specific_credentials.append(
                            ServiceSpecificCredential(
                                arn=credential["Arn"],
                                user=user,
                                status=credential["Status"],
                                create_date=credential["CreateDate"],
                                service_user_name=credential.get("ServiceUserName"),
                                service_credential_alias=credential.get(
                                    "ServiceCredentialAlias"
                                ),
                                expiration_date=credential.get("ExpirationDate"),
                                id=credential.get("ServiceSpecificCredentialId"),
                                service_name=credential.get("ServiceName"),
                                region=self.region,
                            )
                        )
        except Exception as error:
            # Keep scanning other services even if this listing fails.
            logger.error(
                f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
class MFADevice(BaseModel):
serial_number: str
@@ -1046,6 +1081,19 @@ class Role(BaseModel):
tags: Optional[list]
class ServiceSpecificCredential(BaseModel):
    """An IAM service-specific credential (e.g. a Bedrock API key) owned by a user."""

    # Synthesized ARN (the IAM API does not return one for these credentials).
    arn: str
    # Owning IAM user, including its attached/inline policy references.
    user: User
    # Credential status as reported by IAM (e.g. "Active").
    status: str
    create_date: datetime
    service_user_name: Optional[str]
    service_credential_alias: Optional[str]
    expiration_date: Optional[datetime]
    # ServiceSpecificCredentialId from the IAM API.
    id: str
    # Target service, e.g. "bedrock.amazonaws.com".
    service_name: str
    region: str
class Group(BaseModel):
name: str
arn: str

View File

@@ -2,6 +2,7 @@ import json
from datetime import datetime, timezone
from typing import Dict, List, Optional
from botocore.client import ClientError
from pydantic.v1 import BaseModel, Field
from prowler.lib.logger import logger
@@ -67,6 +68,21 @@ class SecretsManager(AWSService):
)
if secret_policy.get("ResourcePolicy"):
secret.policy = json.loads(secret_policy["ResourcePolicy"])
except ClientError as error:
if error.response["Error"]["Code"] in [
"ResourceNotFoundException",
]:
logger.warning(
f"{self.region} --"
f" {error.__class__.__name__}[{error.__traceback__.tb_lineno}]:"
f" {error}"
)
else:
logger.error(
f"{self.region} --"
f" {error.__class__.__name__}[{error.__traceback__.tb_lineno}]:"
f" {error}"
)
except Exception as error:
logger.error(
f"{self.region} --"

View File

@@ -51,6 +51,7 @@ class SQS(AWSService):
def _get_queue_attributes(self):
try:
logger.info("SQS - describing queue attributes...")
valid_queues = []
for queue in self.queues:
try:
regional_client = self.regional_clients[queue.region]
@@ -72,6 +73,7 @@ class SQS(AWSService):
== "true"
):
queue.kms_key_id = "SqsManagedSseEnabled"
valid_queues.append(queue)
except ClientError as error:
if (
error.response["Error"]["Code"]
@@ -84,10 +86,13 @@ class SQS(AWSService):
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
valid_queues.append(queue)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
valid_queues.append(queue)
self.queues = valid_queues
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -99,6 +99,21 @@ class SSM(AWSService):
"AccountIds"
]
except ClientError as error:
if error.response["Error"]["Code"] in [
"InvalidDocumentOperation",
]:
logger.warning(
f"{regional_client.region} --"
f" {error.__class__.__name__}[{error.__traceback__.tb_lineno}]:"
f" {error}"
)
else:
logger.error(
f"{regional_client.region} --"
f" {error.__class__.__name__}[{error.__traceback__.tb_lineno}]:"
f" {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} --"

View File

@@ -70,17 +70,44 @@ class Storage(AzureService):
],
key_expiration_period_in_days=key_expiration_period_in_days,
location=storage_account.location,
default_to_entra_authorization=getattr(
storage_account,
"default_to_o_auth_authentication",
False,
default_to_entra_authorization=(
False
if getattr(
storage_account,
"default_to_o_auth_authentication",
False,
)
is None
else getattr(
storage_account,
"default_to_o_auth_authentication",
False,
)
),
replication_settings=replication_settings,
allow_cross_tenant_replication=getattr(
storage_account, "allow_cross_tenant_replication", True
allow_cross_tenant_replication=(
True
if getattr(
storage_account,
"allow_cross_tenant_replication",
True,
)
is None
else getattr(
storage_account,
"allow_cross_tenant_replication",
True,
)
),
allow_shared_key_access=getattr(
storage_account, "allow_shared_key_access", True
allow_shared_key_access=(
True
if getattr(
storage_account, "allow_shared_key_access", True
)
is None
else getattr(
storage_account, "allow_shared_key_access", True
)
),
)
)

View File

@@ -99,6 +99,7 @@ class GithubProvider(Provider):
personal_access_token: str = "",
oauth_app_token: str = "",
github_app_key: str = "",
github_app_key_content: str = "",
github_app_id: int = 0,
# Provider configuration
config_path: str = None,
@@ -114,6 +115,7 @@ class GithubProvider(Provider):
personal_access_token (str): GitHub personal access token.
oauth_app_token (str): GitHub OAuth App token.
github_app_key (str): GitHub App key.
github_app_key_content (str): GitHub App key content.
github_app_id (int): GitHub App ID.
config_path (str): Path to the audit configuration file.
config_content (dict): Audit configuration content.
@@ -128,6 +130,7 @@ class GithubProvider(Provider):
oauth_app_token,
github_app_id,
github_app_key,
github_app_key_content,
)
# Set the authentication method

View File

@@ -26,7 +26,7 @@ class defender_domain_dkim_enabled(Check):
report = CheckReportM365(
metadata=self.metadata(),
resource=config,
resource_name="DKIM Configuration",
resource_name=config.id,
resource_id=config.id,
)
report.status = "FAIL"

View File

@@ -71,7 +71,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.9.0"
version = "5.10.0"
[project.scripts]
prowler = "prowler.__main__:prowler"

View File

@@ -0,0 +1,256 @@
#!/usr/bin/env python
"""
Security test for prowler-wrapper.py command injection vulnerability
This test demonstrates the command injection vulnerability and validates the fix
"""
import os
import shutil
import sys
import tempfile
import unittest
from unittest.mock import MagicMock, patch
class TestProwlerWrapperSecurity(unittest.TestCase):
    """Test cases for the command-injection fix in contrib/wazuh/prowler-wrapper.py.

    The wrapper must hand its command to subprocess.Popen as an argument list
    (via shlex.split) without shell=True, so shell metacharacters embedded in
    user-controlled input (profile names, etc.) are passed through as literal
    text instead of being executed by a shell.
    """

    def setUp(self):
        """Create a scratch directory and resolve the wrapper's path."""
        self.test_dir = tempfile.mkdtemp()
        # Fix: snapshot sys.path so tearDown can undo the insert performed by
        # _import_prowler_wrapper (the original leaked one entry per test).
        self._saved_sys_path = list(sys.path)
        self.prowler_wrapper_path = os.path.join(
            os.path.dirname(
                os.path.dirname(
                    os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
                )
            ),
            "contrib",
            "wazuh",
            "prowler-wrapper.py",
        )

    def tearDown(self):
        """Remove the scratch directory and restore sys.path."""
        shutil.rmtree(self.test_dir, ignore_errors=True)
        sys.path[:] = self._saved_sys_path

    def _import_prowler_wrapper(self):
        """Load prowler-wrapper.py from disk and return its _run_prowler function.

        The module reads the Wazuh config file at import time, so builtins.open
        is mocked while the module body executes.
        """
        sys.path.insert(0, os.path.dirname(self.prowler_wrapper_path))
        with patch("builtins.open", create=True) as mock_open:
            mock_open.return_value.readline.return_value = 'DIRECTORY="/opt/wazuh"'
            import importlib.util

            spec = importlib.util.spec_from_file_location(
                "prowler_wrapper", self.prowler_wrapper_path
            )
            prowler_wrapper = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(prowler_wrapper)
            return prowler_wrapper._run_prowler

    def _invoke_wrapper(self, args):
        """Run _run_prowler(args) with subprocess.Popen mocked.

        Returns (command, popen_kwargs, result), where command is the first
        positional argument that was handed to Popen.
        """
        with patch("subprocess.Popen") as mock_popen:
            mock_process = MagicMock()
            mock_process.communicate.return_value = (b"test output", None)
            mock_popen.return_value = mock_process
            run_prowler = self._import_prowler_wrapper()
            result = run_prowler(args)
            self.assertTrue(mock_popen.called, "subprocess.Popen was never called")
            return mock_popen.call_args[0][0], mock_popen.call_args[1], result

    def _assert_safe_invocation(self, profile, metachar):
        """Shared body of the injection tests: list execution, literal metachars."""
        command, popen_kwargs, _ = self._invoke_wrapper(f'-p "{profile}" -V')
        # With the fix, the command is a list (from shlex.split) ...
        self.assertIsInstance(
            command, list, "Command should be a list after shlex.split"
        )
        # ... and shell=True is never passed (defaults to False).
        self.assertNotIn(
            "shell",
            popen_kwargs,
            "shell parameter should not be present (defaults to False)",
        )
        # Metacharacters survive as literal argument text, not shell syntax.
        self.assertIn(
            metachar,
            " ".join(command),
            "Shell metacharacters should be preserved as literals",
        )

    def test_command_injection_semicolon(self):
        """Test command injection using semicolon."""
        test_file = os.path.join(self.test_dir, "pwned.txt")
        self._assert_safe_invocation(f"test; touch {test_file}", ";")

    def test_command_injection_ampersand(self):
        """Test command injection using ampersand."""
        test_file = os.path.join(self.test_dir, "pwned2.txt")
        self._assert_safe_invocation(f"test && touch {test_file}", "&&")

    def test_command_injection_pipe(self):
        """Test command injection using pipe."""
        self._assert_safe_invocation('test | echo "injected"', "|")

    def test_command_injection_backticks(self):
        """Test command injection using backticks."""
        self._assert_safe_invocation("test `echo injected`", "`")

    def test_command_injection_dollar_parentheses(self):
        """Test command injection using $() syntax."""
        self._assert_safe_invocation("test $(echo injected)", "$(")

    def test_legitimate_profile_name(self):
        """Test that legitimate profile names still work correctly."""
        legitimate_profile = "production-aws-profile"
        command, _, result = self._invoke_wrapper(f"-p {legitimate_profile} -V")
        # The wrapper returns the mocked stdout unchanged.
        self.assertEqual(result, b"test output")
        self.assertIsInstance(command, list)
        self.assertIn(legitimate_profile, " ".join(command))

    def test_shlex_split_behavior(self):
        """Test that shlex properly handles quoted arguments."""
        profile_with_spaces = "my profile name"
        command, _, _ = self._invoke_wrapper(f'-p "{profile_with_spaces}" -V')
        self.assertIsInstance(command, list)
        # A quoted profile with spaces stays a single argument after splitting.
        self.assertIn(profile_with_spaces, command)


if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,618 @@
from datetime import timezone
from json import dumps
from unittest import mock
from boto3 import client
from moto import mock_aws
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
# Test policy documents
# Full administrative access ("*" action on "*" resource) — the check is
# expected to flag credentials backed by this policy as FAIL.
ADMIN_POLICY = {
    "Version": "2012-10-17",
    "Statement": [{"Effect": "Allow", "Action": ["*"], "Resource": "*"}],
}
# Full access to a single service only (bedrock:*) — not admin, but the
# tests expect the check to flag it as "full service access".
NON_ADMIN_POLICY = {
    "Version": "2012-10-17",
    "Statement": [{"Effect": "Allow", "Action": ["bedrock:*"], "Resource": "*"}],
}
# Classic privilege-escalation combination: being able to create a user,
# mint access keys for it, and attach arbitrary policies to it.
PRIVILEGE_ESCALATION_POLICY = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "iam:CreateAccessKey",
                "iam:CreateUser",
                "iam:AttachUserPolicy",
            ],
            "Resource": "*",
        }
    ],
}
class Test_bedrock_api_key_no_administrative_privileges:
    """Tests for the `bedrock_api_key_no_administrative_privileges` check.

    Each test builds IAM state with moto, constructs a mocked
    ServiceSpecificCredential (the "Bedrock API key") on an IAM service
    instance, patches the check's module-level `iam_client`, and only then
    imports the check module — the import must happen inside the patch
    context so the check binds to the mocked client.
    """

    @mock_aws
    def test_no_bedrock_api_keys(self):
        """With no service-specific credentials at all, the check emits no findings."""
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=IAM(aws_provider),
            ),
        ):
            # Import inside the patch context so the check uses the mocked client.
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            assert len(result) == 0

    @mock_aws
    def test_bedrock_api_key_with_admin_attached_policy(self):
        """A Bedrock key whose user has an attached admin ("*"/"*") policy must FAIL."""
        iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
        # Create user
        user_name = "test_user"
        user_arn = iam_client.create_user(UserName=user_name)["User"]["Arn"]
        # Create admin policy
        admin_policy_arn = iam_client.create_policy(
            PolicyName="AdminPolicy",
            PolicyDocument=dumps(ADMIN_POLICY),
            Path="/",
        )["Policy"]["Arn"]
        # Attach admin policy to user
        iam_client.attach_user_policy(UserName=user_name, PolicyArn=admin_policy_arn)
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        iam = IAM(aws_provider)
        # Mock service-specific credentials
        from datetime import datetime
        from prowler.providers.aws.services.iam.iam_service import (
            ServiceSpecificCredential,
            User,
        )
        # Create a mock user with the attached policy
        mock_user = User(
            name=user_name,
            arn=user_arn,
            attached_policies=[
                {"PolicyArn": admin_policy_arn, "PolicyName": "AdminPolicy"}
            ],
            inline_policies=[],
        )
        # Create a mock service-specific credential
        # NOTE(review): real IAM ARNs carry no region segment
        # (arn:aws:iam::<account>:...); this mock includes one — presumably
        # harmless because the check does not parse the ARN, but confirm.
        mock_credential = ServiceSpecificCredential(
            arn=f"arn:aws:iam:{AWS_REGION_US_EAST_1}:123456789012:user/{user_name}/credential/test-credential-id",
            user=mock_user,
            status="Active",
            create_date=datetime.now(timezone.utc),
            service_user_name=None,
            service_credential_alias=None,
            expiration_date=None,
            id="test-credential-id",
            service_name="bedrock.amazonaws.com",
            region=AWS_REGION_US_EAST_1,
        )
        iam.service_specific_credentials = [mock_credential]
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=iam,
            ),
        ):
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "API key test-credential-id in user test_user has administrative privileges through attached policy AdminPolicy."
            )
            assert result[0].resource_id == "test-credential-id"
            assert result[0].region == AWS_REGION_US_EAST_1

    @mock_aws
    def test_bedrock_api_key_with_admin_inline_policy(self):
        """A Bedrock key whose user has an inline admin policy must FAIL."""
        iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
        # Create user
        user_name = "test_user"
        user_arn = iam_client.create_user(UserName=user_name)["User"]["Arn"]
        # Create inline admin policy
        iam_client.put_user_policy(
            UserName=user_name,
            PolicyName="AdminInlinePolicy",
            PolicyDocument=dumps(ADMIN_POLICY),
        )
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        iam = IAM(aws_provider)
        # Mock service-specific credentials
        from datetime import datetime
        from prowler.providers.aws.services.iam.iam_service import (
            ServiceSpecificCredential,
            User,
        )
        # Create a mock user with the inline policy
        mock_user = User(
            name=user_name,
            arn=user_arn,
            attached_policies=[],
            inline_policies=["AdminInlinePolicy"],
        )
        # Create a mock service-specific credential
        mock_credential = ServiceSpecificCredential(
            arn=f"arn:aws:iam:{AWS_REGION_US_EAST_1}:123456789012:user/{user_name}/credential/test-credential-id",
            user=mock_user,
            status="Active",
            create_date=datetime.now(timezone.utc),
            service_user_name=None,
            service_credential_alias=None,
            expiration_date=None,
            id="test-credential-id",
            service_name="bedrock.amazonaws.com",
            region=AWS_REGION_US_EAST_1,
        )
        iam.service_specific_credentials = [mock_credential]
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=iam,
            ),
        ):
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "API key test-credential-id in user test_user has administrative privileges through inline policy AdminInlinePolicy."
            )
            assert result[0].resource_id == "test-credential-id"
            assert result[0].region == AWS_REGION_US_EAST_1

    @mock_aws
    def test_bedrock_api_key_with_privilege_escalation_attached_policy(self):
        """An attached policy enabling privilege escalation must FAIL with the escalation message."""
        iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
        # Create user
        user_name = "test_user"
        user_arn = iam_client.create_user(UserName=user_name)["User"]["Arn"]
        # Create privilege escalation policy
        escalation_policy_arn = iam_client.create_policy(
            PolicyName="EscalationPolicy",
            PolicyDocument=dumps(PRIVILEGE_ESCALATION_POLICY),
            Path="/",
        )["Policy"]["Arn"]
        # Attach privilege escalation policy to user
        iam_client.attach_user_policy(
            UserName=user_name, PolicyArn=escalation_policy_arn
        )
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        iam = IAM(aws_provider)
        # Mock service-specific credentials
        from datetime import datetime
        from prowler.providers.aws.services.iam.iam_service import (
            ServiceSpecificCredential,
            User,
        )
        # Create a mock user with the attached policy
        mock_user = User(
            name=user_name,
            arn=user_arn,
            attached_policies=[
                {"PolicyArn": escalation_policy_arn, "PolicyName": "EscalationPolicy"}
            ],
            inline_policies=[],
        )
        # Create a mock service-specific credential
        mock_credential = ServiceSpecificCredential(
            arn=f"arn:aws:iam:{AWS_REGION_US_EAST_1}:123456789012:user/{user_name}/credential/test-credential-id",
            user=mock_user,
            status="Active",
            create_date=datetime.now(timezone.utc),
            service_user_name=None,
            service_credential_alias=None,
            expiration_date=None,
            id="test-credential-id",
            service_name="bedrock.amazonaws.com",
            region=AWS_REGION_US_EAST_1,
        )
        iam.service_specific_credentials = [mock_credential]
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=iam,
            ),
        ):
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "API key test-credential-id in user test_user has privilege escalation through attached policy EscalationPolicy."
            )
            assert result[0].resource_id == "test-credential-id"
            assert result[0].region == AWS_REGION_US_EAST_1

    @mock_aws
    def test_bedrock_api_key_with_privilege_escalation_inline_policy(self):
        """An inline policy enabling privilege escalation must FAIL with the escalation message."""
        iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
        # Create user
        user_name = "test_user"
        user_arn = iam_client.create_user(UserName=user_name)["User"]["Arn"]
        # Create inline privilege escalation policy
        iam_client.put_user_policy(
            UserName=user_name,
            PolicyName="EscalationInlinePolicy",
            PolicyDocument=dumps(PRIVILEGE_ESCALATION_POLICY),
        )
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        iam = IAM(aws_provider)
        # Mock service-specific credentials
        from datetime import datetime
        from prowler.providers.aws.services.iam.iam_service import (
            ServiceSpecificCredential,
            User,
        )
        # Create a mock user with the inline policy
        mock_user = User(
            name=user_name,
            arn=user_arn,
            attached_policies=[],
            inline_policies=["EscalationInlinePolicy"],
        )
        # Create a mock service-specific credential
        mock_credential = ServiceSpecificCredential(
            arn=f"arn:aws:iam:{AWS_REGION_US_EAST_1}:123456789012:user/{user_name}/credential/test-credential-id",
            user=mock_user,
            status="Active",
            create_date=datetime.now(timezone.utc),
            service_user_name=None,
            service_credential_alias=None,
            expiration_date=None,
            id="test-credential-id",
            service_name="bedrock.amazonaws.com",
            region=AWS_REGION_US_EAST_1,
        )
        iam.service_specific_credentials = [mock_credential]
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=iam,
            ),
        ):
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "API key test-credential-id in user test_user has privilege escalation through inline policy EscalationInlinePolicy."
            )
            assert result[0].resource_id == "test-credential-id"
            assert result[0].region == AWS_REGION_US_EAST_1

    @mock_aws
    def test_bedrock_api_key_with_non_admin_policy(self):
        """A service-wide (bedrock:*) policy is flagged as full service access, still FAIL."""
        iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
        # Create user
        user_name = "test_user"
        user_arn = iam_client.create_user(UserName=user_name)["User"]["Arn"]
        # Create non-admin policy
        non_admin_policy_arn = iam_client.create_policy(
            PolicyName="NonAdminPolicy",
            PolicyDocument=dumps(NON_ADMIN_POLICY),
            Path="/",
        )["Policy"]["Arn"]
        # Attach non-admin policy to user
        iam_client.attach_user_policy(
            UserName=user_name, PolicyArn=non_admin_policy_arn
        )
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        iam = IAM(aws_provider)
        # Mock service-specific credentials
        from datetime import datetime
        from prowler.providers.aws.services.iam.iam_service import (
            ServiceSpecificCredential,
            User,
        )
        # Create a mock user with the attached policy
        mock_user = User(
            name=user_name,
            arn=user_arn,
            attached_policies=[
                {"PolicyArn": non_admin_policy_arn, "PolicyName": "NonAdminPolicy"}
            ],
            inline_policies=[],
        )
        # Create a mock service-specific credential
        mock_credential = ServiceSpecificCredential(
            arn=f"arn:aws:iam:{AWS_REGION_US_EAST_1}:123456789012:user/{user_name}/credential/test-credential-id",
            user=mock_user,
            status="Active",
            create_date=datetime.now(timezone.utc),
            service_user_name=None,
            service_credential_alias=None,
            expiration_date=None,
            id="test-credential-id",
            service_name="bedrock.amazonaws.com",
            region=AWS_REGION_US_EAST_1,
        )
        iam.service_specific_credentials = [mock_credential]
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=iam,
            ),
        ):
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "API key test-credential-id in user test_user has full service access through attached policy NonAdminPolicy."
            )
            assert result[0].resource_id == "test-credential-id"
            assert result[0].region == AWS_REGION_US_EAST_1

    @mock_aws
    def test_bedrock_api_key_with_no_policies(self):
        """A Bedrock key on a user with no policies at all must PASS."""
        iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
        # Create user
        user_name = "test_user"
        user_arn = iam_client.create_user(UserName=user_name)["User"]["Arn"]
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        iam = IAM(aws_provider)
        # Mock service-specific credentials
        from datetime import datetime
        from prowler.providers.aws.services.iam.iam_service import (
            ServiceSpecificCredential,
            User,
        )
        # Create a mock user with no policies
        mock_user = User(
            name=user_name,
            arn=user_arn,
            attached_policies=[],
            inline_policies=[],
        )
        # Create a mock service-specific credential
        mock_credential = ServiceSpecificCredential(
            arn=f"arn:aws:iam:{AWS_REGION_US_EAST_1}:123456789012:user/{user_name}/credential/test-credential-id",
            user=mock_user,
            status="Active",
            create_date=datetime.now(timezone.utc),
            service_user_name=None,
            service_credential_alias=None,
            expiration_date=None,
            id="test-credential-id",
            service_name="bedrock.amazonaws.com",
            region=AWS_REGION_US_EAST_1,
        )
        iam.service_specific_credentials = [mock_credential]
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=iam,
            ),
        ):
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "PASS"
            assert (
                result[0].status_extended
                == "API key test-credential-id in user test_user has no administrative privileges."
            )
            assert result[0].resource_id == "test-credential-id"
            assert result[0].region == AWS_REGION_US_EAST_1

    @mock_aws
    def test_non_bedrock_api_key_ignored(self):
        """Credentials for other services (here CodeCommit) are outside the check's scope."""
        iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
        # Create user
        user_name = "test_user"
        user_arn = iam_client.create_user(UserName=user_name)["User"]["Arn"]
        # Create admin policy
        admin_policy_arn = iam_client.create_policy(
            PolicyName="AdminPolicy",
            PolicyDocument=dumps(ADMIN_POLICY),
            Path="/",
        )["Policy"]["Arn"]
        # Attach admin policy to user
        iam_client.attach_user_policy(UserName=user_name, PolicyArn=admin_policy_arn)
        from prowler.providers.aws.services.iam.iam_service import IAM
        aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
        iam = IAM(aws_provider)
        # Mock service-specific credentials
        from datetime import datetime
        from prowler.providers.aws.services.iam.iam_service import (
            ServiceSpecificCredential,
            User,
        )
        # Create a mock user with the attached policy
        mock_user = User(
            name=user_name,
            arn=user_arn,
            attached_policies=[
                {"PolicyArn": admin_policy_arn, "PolicyName": "AdminPolicy"}
            ],
            inline_policies=[],
        )
        # Create a mock service-specific credential for a different service (not Bedrock)
        mock_credential = ServiceSpecificCredential(
            arn=f"arn:aws:iam:{AWS_REGION_US_EAST_1}:123456789012:user/{user_name}/credential/test-credential-id",
            user=mock_user,
            status="Active",
            create_date=datetime.now(timezone.utc),
            service_user_name=None,
            service_credential_alias=None,
            expiration_date=None,
            id="test-credential-id",
            service_name="codecommit.amazonaws.com",
            region=AWS_REGION_US_EAST_1,
        )
        iam.service_specific_credentials = [mock_credential]
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=aws_provider,
            ),
            mock.patch(
                "prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges.iam_client",
                new=iam,
            ),
        ):
            from prowler.providers.aws.services.bedrock.bedrock_api_key_no_administrative_privileges.bedrock_api_key_no_administrative_privileges import (
                bedrock_api_key_no_administrative_privileges,
            )
            check = bedrock_api_key_no_administrative_privileges()
            result = check.execute()
            # Should return 0 results since the API key is not for Bedrock
            assert len(result) == 0

View File

@@ -760,7 +760,7 @@ class Test_IAM_Service:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
iam = IAM(aws_provider)
custom_policies = 0
for policy in iam.policies:
for policy in iam.policies.values():
if policy.type == "Custom":
custom_policies += 1
assert policy.name == "policy1"
@@ -786,7 +786,7 @@ class Test_IAM_Service:
iam = IAM(aws_provider)
custom_policies = 0
for policy in iam.policies:
for policy in iam.policies.values():
if policy.type == "Custom":
custom_policies += 1
assert policy.name == "policy2"
@@ -872,7 +872,7 @@ nTTxU4a7x1naFxzYXK1iQ1vMARKMjDb19QEJIEJKZlDK4uS7yMlf1nFS
assert iam.users[0].tags == []
# TODO: Workaround until this gets fixed https://github.com/getmoto/moto/issues/6712
for policy in iam.policies:
for policy in iam.policies.values():
if policy.name == policy_name:
assert policy == Policy(
name=policy_name,
@@ -914,7 +914,7 @@ nTTxU4a7x1naFxzYXK1iQ1vMARKMjDb19QEJIEJKZlDK4uS7yMlf1nFS
assert iam.groups[0].users == []
# TODO: Workaround until this gets fixed https://github.com/getmoto/moto/issues/6712
for policy in iam.policies:
for policy in iam.policies.values():
if policy.name == policy_name:
assert policy == Policy(
name=policy_name,
@@ -960,7 +960,7 @@ nTTxU4a7x1naFxzYXK1iQ1vMARKMjDb19QEJIEJKZlDK4uS7yMlf1nFS
assert iam.roles[0].tags == []
# TODO: Workaround until this gets fixed https://github.com/getmoto/moto/issues/6712
for policy in iam.policies:
for policy in iam.policies.values():
if policy.name == policy_name:
assert policy == Policy(
name=policy_name,

View File

@@ -43,7 +43,7 @@ class Test_defender_domain_dkim_enabled:
== "DKIM is enabled for domain with ID domain1."
)
assert result[0].resource == defender_client.dkim_configurations[0].dict()
assert result[0].resource_name == "DKIM Configuration"
assert result[0].resource_name == "domain1"
assert result[0].resource_id == "domain1"
assert result[0].location == "global"
@@ -86,7 +86,7 @@ class Test_defender_domain_dkim_enabled:
== "DKIM is not enabled for domain with ID domain2."
)
assert result[0].resource == defender_client.dkim_configurations[0].dict()
assert result[0].resource_name == "DKIM Configuration"
assert result[0].resource_name == "domain2"
assert result[0].resource_id == "domain2"
assert result[0].location == "global"

View File

@@ -8,10 +8,12 @@ All notable changes to the **Prowler UI** are documented in this file.
- Mutelist configuration form [(#8190)](https://github.com/prowler-cloud/prowler/pull/8190)
- SAML login integration [(#8203)](https://github.com/prowler-cloud/prowler/pull/8203)
- Github provider support [(#8304)](https://github.com/prowler-cloud/prowler/pull/8304)
- Resource view [(#7760)](https://github.com/prowler-cloud/prowler/pull/7760)
- Navigation link in Scans view to access Compliance Overview [(#8251)](https://github.com/prowler-cloud/prowler/pull/8251)
- Status column for findings table in the Compliance Detail view [(#8244)](https://github.com/prowler-cloud/prowler/pull/8244)
- Allow to restrict routes access based on user permissions [(#8287)](https://github.com/prowler-cloud/prowler/pull/8287)
- Max character limit validation for Scan label [(#8319)](https://github.com/prowler-cloud/prowler/pull/8319)
### Security
@@ -20,6 +22,8 @@ All notable changes to the **Prowler UI** are documented in this file.
### 🔄 Changed
- Upgrade to Next.js 14.2.30 and lock TypeScript to 5.5.4 for ESLint compatibility [(#8189)](https://github.com/prowler-cloud/prowler/pull/8189)
- Improved active step highlighting and updated step titles and descriptions in the Cloud Provider credentials update flow [(#8303)](https://github.com/prowler-cloud/prowler/pull/8303)
- Refactored all existing links across the app to use new custom-link component for consistent styling [(#8341)](https://github.com/prowler-cloud/prowler/pull/8341)
### 🐞 Fixed

View File

@@ -1,10 +1,10 @@
"use client";
import Link from "next/link";
import { useEffect } from "react";
import { RocketIcon } from "@/components/icons";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui";
import { CustomLink } from "@/components/ui/custom/custom-link";
export default function Error({
error,
@@ -27,9 +27,9 @@ export default function Error({
We&apos;re sorry for the inconvenience. Please try again or contact
support if the problem persists.
</AlertDescription>
<Link href="/" className="font-bold">
<CustomLink href="/" target="_self" className="font-bold">
Go to the homepage
</Link>
</CustomLink>
</Alert>
);
}

View File

@@ -9,37 +9,60 @@ import {
AddViaServiceAccountForm,
SelectViaGCP,
} from "@/components/providers/workflow/forms/select-credentials-type/gcp";
import { SelectViaGitHub } from "@/components/providers/workflow/forms/select-credentials-type/github";
import { ProviderType } from "@/types/providers";
interface Props {
searchParams: { type: ProviderType; id: string; via?: string };
}
// Helper function to determine if the credentials form should be shown
const shouldShowCredentialsForm = (
type: ProviderType,
via?: string,
): boolean => {
const credentialsConfig = {
aws: ["credentials"],
gcp: ["credentials"],
github: ["personal_access_token", "oauth_app_token", "github_app"],
};
// If the type is in the configuration, check if the 'via' method is allowed
if (credentialsConfig[type as keyof typeof credentialsConfig]) {
return credentialsConfig[type as keyof typeof credentialsConfig].includes(
via || "",
);
}
// For unspecified types, show the default form
return !["aws", "gcp", "github"].includes(type);
};
export default function AddCredentialsPage({ searchParams }: Props) {
const { type, via } = searchParams;
return (
<>
{searchParams.type === "aws" && !searchParams.via && (
<SelectViaAWS initialVia={searchParams.via} />
)}
{/* Selectors for authentication methods */}
{type === "aws" && !via && <SelectViaAWS initialVia={via} />}
{searchParams.type === "gcp" && !searchParams.via && (
<SelectViaGCP initialVia={searchParams.via} />
)}
{type === "gcp" && !via && <SelectViaGCP initialVia={via} />}
{((searchParams.type === "aws" && searchParams.via === "credentials") ||
(searchParams.type === "gcp" && searchParams.via === "credentials") ||
(searchParams.type !== "aws" && searchParams.type !== "gcp")) && (
{type === "github" && !via && <SelectViaGitHub initialVia={via} />}
{/* Credentials form */}
{shouldShowCredentialsForm(type, via) && (
<AddViaCredentialsForm searchParams={searchParams} />
)}
{searchParams.type === "aws" && searchParams.via === "role" && (
{/* Specific forms */}
{type === "aws" && via === "role" && (
<AddViaRoleForm searchParams={searchParams} />
)}
{searchParams.type === "gcp" &&
searchParams.via === "service-account" && (
<AddViaServiceAccountForm searchParams={searchParams} />
)}
{type === "gcp" && via === "service-account" && (
<AddViaServiceAccountForm searchParams={searchParams} />
)}
</>
);
}

View File

@@ -1,6 +1,6 @@
import React from "react";
import { CredentialsUpdateInfo } from "@/components/providers";
import { CredentialsUpdateInfo } from "@/components/providers/credentials-update-info";
import {
UpdateViaCredentialsForm,
UpdateViaRoleForm,
@@ -17,31 +17,51 @@ interface Props {
};
}
// Helper function to determine if the credentials form should be shown
const shouldShowCredentialsForm = (
type: ProviderType,
via?: string,
): boolean => {
const credentialsConfig = {
aws: ["credentials"],
gcp: ["credentials"],
github: ["personal_access_token", "oauth_app_token", "github_app"],
};
// If the type is in the configuration, check if the 'via' method is allowed
if (credentialsConfig[type as keyof typeof credentialsConfig]) {
return credentialsConfig[type as keyof typeof credentialsConfig].includes(
via || "",
);
}
// For unspecified types, show the default form
return !["aws", "gcp", "github"].includes(type);
};
export default function UpdateCredentialsPage({ searchParams }: Props) {
const { type, via } = searchParams;
return (
<>
{(searchParams.type === "aws" || searchParams.type === "gcp") &&
!searchParams.via && (
<CredentialsUpdateInfo
providerType={searchParams.type}
initialVia={searchParams.via}
/>
)}
{/* Credentials update info for supported providers */}
{(type === "aws" || type === "gcp" || type === "github") && !via && (
<CredentialsUpdateInfo providerType={type} initialVia={via} />
)}
{((searchParams.type === "aws" && searchParams.via === "credentials") ||
(searchParams.type === "gcp" && searchParams.via === "credentials") ||
(searchParams.type !== "aws" && searchParams.type !== "gcp")) && (
{/* Credentials form */}
{shouldShowCredentialsForm(type, via) && (
<UpdateViaCredentialsForm searchParams={searchParams} />
)}
{searchParams.type === "aws" && searchParams.via === "role" && (
{/* Specific forms */}
{type === "aws" && via === "role" && (
<UpdateViaRoleForm searchParams={searchParams} />
)}
{searchParams.type === "gcp" &&
searchParams.via === "service-account" && (
<UpdateViaServiceAccountForm searchParams={searchParams} />
)}
{type === "gcp" && via === "service-account" && (
<UpdateViaServiceAccountForm searchParams={searchParams} />
)}
</>
);
}

View File

@@ -2,7 +2,7 @@
import { zodResolver } from "@hookform/resolvers/zod";
import { Icon } from "@iconify/react";
import { Button, Checkbox, Divider, Link, Tooltip } from "@nextui-org/react";
import { Button, Checkbox, Divider, Tooltip } from "@nextui-org/react";
import { useRouter, useSearchParams } from "next/navigation";
import { useEffect } from "react";
import { useForm } from "react-hook-form";
@@ -15,6 +15,7 @@ import { NotificationIcon, ProwlerExtended } from "@/components/icons";
import { ThemeSwitch } from "@/components/ThemeSwitch";
import { useToast } from "@/components/ui";
import { CustomButton, CustomInput } from "@/components/ui/custom";
import { CustomLink } from "@/components/ui/custom/custom-link";
import {
Form,
FormControl,
@@ -301,13 +302,12 @@ export const AuthForm = ({
onChange={(e) => field.onChange(e.target.checked)}
>
I agree with the&nbsp;
<Link
<CustomLink
href="https://prowler.com/terms-of-service/"
size="sm"
target="_blank"
>
Terms of Service
</Link>
</CustomLink>
&nbsp;of Prowler
</Checkbox>
</FormControl>
@@ -359,13 +359,9 @@ export const AuthForm = ({
content={
<div className="flex-inline text-small">
Social Login with Google is not enabled.{" "}
<Link
target="_blank"
rel="noopener noreferrer"
className="text-xs font-medium text-primary"
>
<CustomLink href="https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app-social-login/#google-oauth-configuration">
Read the docs
</Link>
</CustomLink>
</div>
}
placement="right-start"
@@ -392,13 +388,9 @@ export const AuthForm = ({
content={
<div className="flex-inline text-small">
Social Login with Github is not enabled.{" "}
<Link
target="_blank"
rel="noopener noreferrer"
className="text-xs font-medium text-primary"
>
<CustomLink href="https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app-social-login/#github-oauth-configuration">
Read the docs
</Link>
</CustomLink>
</div>
}
placement="right-start"
@@ -451,12 +443,16 @@ export const AuthForm = ({
{type === "sign-in" ? (
<p className="text-center text-small">
Need to create an account?&nbsp;
<Link href="/sign-up">Sign up</Link>
<CustomLink size="base" href="/sign-up" target="_self">
Sign up
</CustomLink>
</p>
) : (
<p className="text-center text-small">
Already have an account?&nbsp;
<Link href="/sign-in">Log in</Link>
<CustomLink size="base" href="/sign-in" target="_self">
Log in
</CustomLink>
</p>
)}
</div>

View File

@@ -1,3 +1,4 @@
import { CustomLink } from "@/components/ui/custom/custom-link";
import { SeverityBadge } from "@/components/ui/table";
import { Requirement } from "@/types/compliance";
@@ -7,7 +8,6 @@ import {
ComplianceDetailContainer,
ComplianceDetailSection,
ComplianceDetailText,
ComplianceLink,
} from "./shared-components";
export const AWSWellArchitectedCustomDetails = ({
@@ -75,11 +75,9 @@ export const AWSWellArchitectedCustomDetails = ({
{requirement.implementation_guidance_url && (
<ComplianceDetailSection title="Implementation Guidance">
<ComplianceLink
href={requirement.implementation_guidance_url as string}
>
<CustomLink href={requirement.implementation_guidance_url as string}>
{requirement.implementation_guidance_url as string}
</ComplianceLink>
</CustomLink>
</ComplianceDetailSection>
)}
</ComplianceDetailContainer>

View File

@@ -1,5 +1,6 @@
import ReactMarkdown from "react-markdown";
import { CustomLink } from "@/components/ui/custom/custom-link";
import { Requirement } from "@/types/compliance";
import {
@@ -8,7 +9,6 @@ import {
ComplianceDetailContainer,
ComplianceDetailSection,
ComplianceDetailText,
ComplianceLink,
} from "./shared-components";
interface CISDetailsProps {
@@ -121,7 +121,7 @@ export const CISCustomDetails = ({ requirement }: CISDetailsProps) => {
{processReferences(requirement.references).map(
(url: string, index: number) => (
<div key={index}>
<ComplianceLink href={url}>{url}</ComplianceLink>
<CustomLink href={url}>{url}</CustomLink>
</div>
),
)}

View File

@@ -1,3 +1,4 @@
import { CustomLink } from "@/components/ui/custom/custom-link";
import { Requirement } from "@/types/compliance";
import {
@@ -7,7 +8,6 @@ import {
ComplianceDetailContainer,
ComplianceDetailSection,
ComplianceDetailText,
ComplianceLink,
} from "./shared-components";
export const MITRECustomDetails = ({
@@ -63,9 +63,9 @@ export const MITRECustomDetails = ({
{requirement.technique_url && (
<ComplianceDetailSection title="MITRE ATT&CK Reference">
<ComplianceLink href={requirement.technique_url as string}>
<CustomLink href={requirement.technique_url as string}>
{requirement.technique_url as string}
</ComplianceLink>
</CustomLink>
</ComplianceDetailSection>
)}

View File

@@ -1,26 +1,5 @@
import Link from "next/link";
import { cn } from "@/lib/utils";
export const ComplianceLink = ({
href,
children,
}: {
href: string;
children: React.ReactNode;
}) => {
return (
<Link
href={href}
target="_blank"
rel="noopener noreferrer"
className="break-all text-sm text-blue-600 decoration-1 transition-colors hover:text-blue-800 dark:text-blue-400 dark:hover:text-blue-300"
>
{children}
</Link>
);
};
export const ComplianceDetailContainer = ({
children,
}: {

View File

@@ -1,10 +1,10 @@
"use client";
import { Snippet } from "@nextui-org/react";
import Link from "next/link";
import { CodeSnippet } from "@/components/ui/code-snippet/code-snippet";
import { CustomSection } from "@/components/ui/custom";
import { CustomLink } from "@/components/ui/custom/custom-link";
import { EntityInfoShort, InfoField } from "@/components/ui/entities";
import { DateWithTime } from "@/components/ui/entities/date-with-time";
import { SeverityBadge } from "@/components/ui/table/severity-badge";
@@ -151,15 +151,14 @@ export const FindingDetail = ({
{attributes.check_metadata.remediation.recommendation.text}
</p>
{attributes.check_metadata.remediation.recommendation.url && (
<Link
<CustomLink
href={
attributes.check_metadata.remediation.recommendation.url
}
target="_blank"
className="text-sm text-blue-500 hover:underline"
size="sm"
>
Learn more
</Link>
</CustomLink>
)}
</div>
</InfoField>
@@ -179,13 +178,12 @@ export const FindingDetail = ({
{/* Additional Resources section */}
{attributes.check_metadata.remediation.code.other && (
<InfoField label="Additional Resources">
<Link
<CustomLink
href={attributes.check_metadata.remediation.code.other}
target="_blank"
className="text-sm text-blue-500 hover:underline"
size="sm"
>
View documentation
</Link>
</CustomLink>
</InfoField>
)}
</div>

Some files were not shown because too many files have changed in this diff. Show More