Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-05-09 00:47:04 +00:00)

Compare commits (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | f9eac0409b |  |
@@ -1,143 +0,0 @@

```yaml
name: "🔎 New Check Request"
description: Request a new Prowler security check
title: "[New Check]: "
labels: ["feature-request", "status/needs-triage"]

body:
  - type: checkboxes
    id: search
    attributes:
      label: Existing check search
      description: Confirm this check does not already exist before opening a new request.
      options:
        - label: I have searched existing issues, Prowler Hub, and the public roadmap, and this check does not already exist.
          required: true

  - type: markdown
    attributes:
      value: |
        Use this form to describe the security condition that Prowler should evaluate.

        The most useful inputs for [Prowler Studio](https://github.com/prowler-cloud/prowler-studio) are:
        - What should be detected
        - What PASS and FAIL mean
        - Vendor docs, API references, SDK methods, CLI commands, or reference code

  - type: dropdown
    id: provider
    attributes:
      label: Provider
      description: Cloud or platform this check targets.
      options:
        - AWS
        - Azure
        - GCP
        - Kubernetes
        - GitHub
        - Microsoft 365
        - OCI
        - Alibaba Cloud
        - Cloudflare
        - MongoDB Atlas
        - Google Workspace
        - OpenStack
        - Vercel
        - NHN
        - Other / New provider
    validations:
      required: true

  - type: input
    id: other_provider_name
    attributes:
      label: New provider name
      description: Only fill this if you selected "Other / New provider" above.
      placeholder: "NewProviderName"
    validations:
      required: false

  - type: input
    id: service_name
    attributes:
      label: Service or product area
      description: Optional. Main service, product, or feature to audit.
      placeholder: "s3, bedrock, entra, repository, apiserver"
    validations:
      required: false

  - type: input
    id: suggested_check_name
    attributes:
      label: Suggested check name
      description: Optional. Use `snake_case` following `<service>_<resource>_<best_practice>`, with lowercase letters and underscores only.
      placeholder: "bedrock_guardrail_sensitive_information_filter_enabled"
    validations:
      required: false

  - type: textarea
    id: context
    attributes:
      label: Context and goal
      description: Describe the security problem, why it matters, and what this new check should help detect.
      placeholder: |-
        - Security condition to validate:
        - Why it matters:
        - Resource, feature, or configuration involved:
    validations:
      required: true

  - type: textarea
    id: expected_behavior
    attributes:
      label: Expected behavior
      description: Explain what the check should evaluate and what PASS, FAIL, or MANUAL should mean.
      placeholder: |-
        - Resource or scope to evaluate:
        - PASS when:
        - FAIL when:
        - MANUAL when (if applicable):
        - Exclusions, thresholds, or edge cases:
    validations:
      required: true

  - type: textarea
    id: references
    attributes:
      label: References
      description: Add vendor docs, API references, SDK methods, CLI commands, endpoint docs, sample payloads, or similar reference material.
      placeholder: |-
        - Product or service documentation:
        - API or SDK reference:
        - CLI command or endpoint documentation:
        - Sample payload or response:
        - Security advisory or benchmark:
    validations:
      required: true

  - type: dropdown
    id: severity
    attributes:
      label: Suggested severity
      description: Your best estimate. Reviewers will confirm during triage.
      options:
        - Critical
        - High
        - Medium
        - Low
        - Informational
        - Not sure
    validations:
      required: true

  - type: textarea
    id: implementation_notes
    attributes:
      label: Additional implementation notes
      description: Optional. Add permissions, unsupported regions, config knobs, product limitations, or anything else that may affect implementation.
      placeholder: |-
        - Required permissions or scopes:
        - Region, tenant, or subscription limitations:
        - Configurable behavior or thresholds:
        - Other constraints:
    validations:
      required: false
```
```diff
@@ -158,7 +158,7 @@ jobs:
           tags: |
             ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
           cache-from: type=gha,scope=${{ matrix.arch }}
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

   # Create and push multi-architecture manifest
   create-manifest:
```
```diff
@@ -5,16 +5,10 @@ on:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'api/**'
-      - '.github/workflows/api-container-checks.yml'
   pull_request:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'api/**'
-      - '.github/workflows/api-container-checks.yml'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
```
```diff
@@ -63,7 +57,16 @@ jobs:

   api-container-build-and-scan:
     if: github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        include:
+          - platform: linux/amd64
+            runner: ubuntu-latest
+            arch: amd64
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+            arch: arm64
     timeout-minutes: 30
     permissions:
       contents: read
```
```diff
@@ -116,22 +119,23 @@ jobs:
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

-      - name: Build container
+      - name: Build container for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
         with:
           context: ${{ env.API_WORKING_DIR }}
           push: false
           load: true
-          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}
+          platforms: ${{ matrix.platform }}
+          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

-      - name: Scan container with Trivy
+      - name: Scan container with Trivy for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: ./.github/actions/trivy-scan
         with:
           image-name: ${{ env.IMAGE_NAME }}
-          image-tag: ${{ github.sha }}
+          image-tag: ${{ github.sha }}-${{ matrix.arch }}
           fail-on-critical: 'false'
           severity: 'CRITICAL'
```
```diff
@@ -5,20 +5,10 @@ on:
     branches:
       - "master"
       - "v5.*"
-    paths:
-      - 'api/**'
-      - '.github/workflows/api-tests.yml'
-      - '.github/workflows/api-security.yml'
-      - '.github/actions/setup-python-poetry/**'
   pull_request:
     branches:
       - "master"
       - "v5.*"
-    paths:
-      - 'api/**'
-      - '.github/workflows/api-tests.yml'
-      - '.github/workflows/api-security.yml'
-      - '.github/actions/setup-python-poetry/**'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
```
```diff
@@ -4,6 +4,8 @@ on:
   pull_request:
     branches:
       - 'master'
+      - 'v3'
+      - 'v4.*'
       - 'v5.*'
     types:
       - 'opened'
```
```diff
@@ -43,11 +43,14 @@ jobs:

           echo "Processing release tag: $RELEASE_TAG"

           # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
           VERSION_ONLY="${RELEASE_TAG#v}"

           # Check if it's a minor version (X.Y.0)
           if [[ "$VERSION_ONLY" =~ ^([0-9]+)\.([0-9]+)\.0$ ]]; then
             echo "Release $RELEASE_TAG (version $VERSION_ONLY) is a minor version. Proceeding to create backport label."

             # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
             MAJOR="${BASH_REMATCH[1]}"
             MINOR="${BASH_REMATCH[2]}"
             TWO_DIGIT_VERSION="${MAJOR}.${MINOR}"
```
```diff
@@ -59,6 +62,7 @@ jobs:
             echo "Label name: $LABEL_NAME"
             echo "Label description: $LABEL_DESC"

             # Check if label already exists
             if gh label list --repo ${{ github.repository }} --limit 1000 | grep -q "^${LABEL_NAME}[[:space:]]"; then
               echo "Label '$LABEL_NAME' already exists."
             else
```
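The two hunks above gate backport-label creation on a semver pattern: only `X.Y.0` tags count as minor releases. As a hedged illustration (not part of the diff), the same tag check ported to Python:

```python
import re

def minor_version_parts(release_tag: str) -> tuple[str, str] | None:
    """Mirror the workflow's check: only X.Y.0 tags ("minor versions") qualify."""
    version_only = release_tag.removeprefix("v")   # v3.2.0 -> 3.2.0
    match = re.fullmatch(r"(\d+)\.(\d+)\.0", version_only)
    if match is None:
        return None                                # patch releases are skipped
    return match.group(1), match.group(2)          # e.g. ("5", "6") for v5.6.0

assert minor_version_parts("v5.6.0") == ("5", "6")
assert minor_version_parts("v5.6.1") is None
```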
```diff
@@ -37,13 +37,10 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
-          # PRs only need the diff range; push to master/release walks the new range from event.before.
-          # 50 is enough headroom for the longest realistic PR/push chain without paying for a full clone.
-          fetch-depth: 50
+          fetch-depth: 0
           persist-credentials: false

-      - name: Scan diff for secrets with TruffleHog
-        # Action auto-injects --since-commit/--branch from event payload; passing them in extra_args produces duplicate flags.
+      - name: Scan for secrets with TruffleHog
         uses: trufflesecurity/trufflehog@ef6e76c3c4023279497fab4721ffa071a722fd05 # v3.92.4
         with:
-          extra_args: --results=verified,unknown
+          extra_args: '--results=verified,unknown'
```
```diff
@@ -152,7 +152,7 @@ jobs:
             org.opencontainers.image.created=${{ github.event_name == 'release' && github.event.release.published_at || github.event.head_commit.timestamp }}
             ${{ github.event_name == 'release' && format('org.opencontainers.image.version={0}', env.RELEASE_TAG) || '' }}
           cache-from: type=gha,scope=${{ matrix.arch }}
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

   # Create and push multi-architecture manifest
   create-manifest:
```
```diff
@@ -5,16 +5,10 @@ on:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'mcp_server/**'
-      - '.github/workflows/mcp-container-checks.yml'
   pull_request:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'mcp_server/**'
-      - '.github/workflows/mcp-container-checks.yml'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
```
```diff
@@ -62,7 +56,16 @@ jobs:

   mcp-container-build-and-scan:
     if: github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        include:
+          - platform: linux/amd64
+            runner: ubuntu-latest
+            arch: amd64
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+            arch: arm64
     timeout-minutes: 30
     permissions:
       contents: read
```
```diff
@@ -109,22 +112,23 @@ jobs:
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

-      - name: Build MCP container
+      - name: Build MCP container for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
         with:
           context: ${{ env.MCP_WORKING_DIR }}
           push: false
           load: true
-          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}
+          platforms: ${{ matrix.platform }}
+          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

-      - name: Scan MCP container with Trivy
+      - name: Scan MCP container with Trivy for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: ./.github/actions/trivy-scan
         with:
           image-name: ${{ env.IMAGE_NAME }}
-          image-tag: ${{ github.sha }}
+          image-tag: ${{ github.sha }}-${{ matrix.arch }}
           fail-on-critical: 'false'
           severity: 'CRITICAL'
```
```diff
@@ -86,32 +86,11 @@ jobs:
         with:
           python-version: ${{ env.PYTHON_VERSION }}

-      # The MCP server version (mcp_server/pyproject.toml) is decoupled from the Prowler release
-      # version: it only changes when MCP code changes. mcp-bump-version.yml normally keeps it in
-      # sync with mcp_server/CHANGELOG.md, but this publish workflow still runs on every release.
-      # Pre-flight PyPI check covers the legitimate "no MCP changes for this release" case (and any
-      # workflow_dispatch re-runs) without failing with HTTP 400 (version exists).
-      - name: Check if prowler-mcp version already exists on PyPI
-        id: pypi-check
-        working-directory: ${{ env.WORKING_DIRECTORY }}
-        run: |
-          MCP_VERSION=$(grep '^version' pyproject.toml | head -1 | sed -E 's/^version[[:space:]]*=[[:space:]]*"([^"]+)".*/\1/')
-          echo "mcp_version=${MCP_VERSION}" >> "$GITHUB_OUTPUT"
-          if curl -fsS "https://pypi.org/pypi/prowler-mcp/${MCP_VERSION}/json" >/dev/null 2>&1; then
-            echo "skip=true" >> "$GITHUB_OUTPUT"
-            echo "::notice title=Skipping prowler-mcp publish::Version ${MCP_VERSION} already exists on PyPI; bump mcp_server/pyproject.toml to publish a new release."
-          else
-            echo "skip=false" >> "$GITHUB_OUTPUT"
-            echo "::notice title=Publishing prowler-mcp::Version ${MCP_VERSION} not on PyPI yet; proceeding."
-          fi
-
       - name: Build prowler-mcp package
-        if: steps.pypi-check.outputs.skip != 'true'
         working-directory: ${{ env.WORKING_DIRECTORY }}
         run: uv build

       - name: Publish prowler-mcp package to PyPI
-        if: steps.pypi-check.outputs.skip != 'true'
        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
         with:
           packages-dir: ${{ env.WORKING_DIRECTORY }}/dist/
```
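The removed pre-flight step probes PyPI's JSON endpoint before publishing. As a hedged sketch of the same idea (not part of the diff; the package and version arguments are placeholders), in Python with only the standard library:

```python
import urllib.error
import urllib.request

def version_on_pypi(package: str, version: str) -> bool:
    """True if package==version is already published, via PyPI's JSON API."""
    url = f"https://pypi.org/pypi/{package}/{version}/json"
    try:
        with urllib.request.urlopen(url, timeout=10) as resp:
            return resp.status == 200
    except urllib.error.HTTPError as err:
        if err.code == 404:        # version not published yet
            return False
        raise

# Publish only when the version is new, mirroring the skipped-step logic above.
if version_on_pypi("prowler-mcp", "1.0.0"):   # "1.0.0" is a placeholder
    print("skip=true")
```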
@@ -1,98 +0,0 @@

```yaml
name: 'Nightly: ARM64 Container Builds'

# Mitigation for amd64-only PR container-checks: build amd64+arm64 nightly against
# master to keep arm-specific Dockerfile regressions caught quickly. Build only —
# no push, no Trivy (weekly checks already cover that).

on:
  schedule:
    - cron: '0 4 * * *'
  workflow_dispatch: {}

concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

permissions: {}

jobs:
  build-arm64:
    if: github.repository == 'prowler-cloud/prowler'
    runs-on: ubuntu-24.04-arm
    timeout-minutes: 60
    permissions:
      contents: read
    strategy:
      fail-fast: false
      matrix:
        include:
          - component: sdk
            context: .
            dockerfile: ./Dockerfile
            image_name: prowler
          - component: api
            context: ./api
            dockerfile: ./api/Dockerfile
            image_name: prowler-api
          - component: ui
            context: ./ui
            dockerfile: ./ui/Dockerfile
            image_name: prowler-ui
            target: prod
            build_args: |
              NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
          - component: mcp
            context: ./mcp_server
            dockerfile: ./mcp_server/Dockerfile
            image_name: prowler-mcp

    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          egress-policy: audit

      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

      - name: Build ${{ matrix.component }} container (linux/arm64)
        uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
        with:
          context: ${{ matrix.context }}
          file: ${{ matrix.dockerfile }}
          target: ${{ matrix.target }}
          push: false
          load: false
          platforms: linux/arm64
          tags: ${{ matrix.image_name }}:nightly-arm64
          build-args: ${{ matrix.build_args }}
          cache-from: type=gha,scope=arm64
          cache-to: type=gha,mode=min,scope=arm64

  notify-failure:
    needs: build-arm64
    if: failure() && github.event_name == 'schedule'
    runs-on: ubuntu-latest
    timeout-minutes: 5
    permissions:
      contents: read
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          egress-policy: audit

      - name: Notify Slack on failure
        uses: slackapi/slack-github-action@91efab103c0de0a537f72a35f6b8cda0ee76bf0a # v2.1.1
        with:
          method: chat.postMessage
          token: ${{ secrets.SLACK_BOT_TOKEN }}
          payload: |
            channel: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
            text: ":rotating_light: Nightly arm64 container build failed for prowler — <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|view run>"
          errors: true
```
```diff
@@ -41,15 +41,10 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
-          fetch-depth: 1
+          fetch-depth: 0
           # zizmor: ignore[artipacked]
           persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch

-      - name: Fetch PR base ref for tj-actions/changed-files
-        env:
-          BASE_REF: ${{ github.event.pull_request.base.ref }}
-        run: git fetch --depth=1 origin "${BASE_REF}"
-
       - name: Get changed files
         id: changed-files
         uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
```
```diff
@@ -45,15 +45,10 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
-          fetch-depth: 1
+          fetch-depth: 0
           # zizmor: ignore[artipacked]
           persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch

-      - name: Fetch PR base ref for tj-actions/changed-files
-        env:
-          BASE_REF: ${{ github.event.pull_request.base.ref }}
-        run: git fetch --depth=1 origin "${BASE_REF}"
-
       - name: Get changed files
         id: changed-files
         uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
```
```diff
@@ -36,14 +36,8 @@ jobs:
         uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           ref: ${{ github.event.pull_request.head.sha }}
-          fetch-depth: 1
-          # zizmor: ignore[artipacked]
-          persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
-
-      - name: Fetch PR base ref for tj-actions/changed-files
-        env:
-          BASE_REF: ${{ github.event.pull_request.base.ref }}
-        run: git fetch --depth=1 origin "${BASE_REF}"
+          fetch-depth: 0
+          persist-credentials: false

       - name: Get changed files
         id: changed-files
```
```diff
@@ -5,9 +5,6 @@ on:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'tests/providers/**/*_test.py'
-      - '.github/workflows/sdk-check-duplicate-test-names.yml'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
```
```diff
@@ -3,7 +3,9 @@ name: 'SDK: Container Build and Push'
 on:
   push:
     branches:
-      - 'master'
+      - 'v3' # For v3-latest
+      - 'v4.6' # For v4-latest
+      - 'master' # For latest
     paths-ignore:
       - '.github/**'
       - '!.github/workflows/sdk-container-build-push.yml'
```
```diff
@@ -54,6 +56,7 @@ jobs:
     timeout-minutes: 5
     outputs:
       prowler_version: ${{ steps.get-prowler-version.outputs.prowler_version }}
+      prowler_version_major: ${{ steps.get-prowler-version.outputs.prowler_version_major }}
       latest_tag: ${{ steps.get-prowler-version.outputs.latest_tag }}
       stable_tag: ${{ steps.get-prowler-version.outputs.stable_tag }}
     permissions:
```
```diff
@@ -89,13 +92,32 @@ jobs:
           PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
           echo "prowler_version=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

           # Extract major version
           PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
-          if [[ "${PROWLER_VERSION_MAJOR}" != "5" ]]; then
-            echo "::error::Unsupported Prowler major version: ${PROWLER_VERSION_MAJOR}"
-            exit 1
-          fi
-          echo "latest_tag=latest" >> "${GITHUB_OUTPUT}"
-          echo "stable_tag=stable" >> "${GITHUB_OUTPUT}"
           echo "prowler_version_major=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_OUTPUT}"

+          # Set version-specific tags
+          case ${PROWLER_VERSION_MAJOR} in
+            3)
+              echo "latest_tag=v3-latest" >> "${GITHUB_OUTPUT}"
+              echo "stable_tag=v3-stable" >> "${GITHUB_OUTPUT}"
+              echo "✓ Prowler v3 detected - tags: v3-latest, v3-stable"
+              ;;
+            4)
+              echo "latest_tag=v4-latest" >> "${GITHUB_OUTPUT}"
+              echo "stable_tag=v4-stable" >> "${GITHUB_OUTPUT}"
+              echo "✓ Prowler v4 detected - tags: v4-latest, v4-stable"
+              ;;
+            5)
+              echo "latest_tag=latest" >> "${GITHUB_OUTPUT}"
+              echo "stable_tag=stable" >> "${GITHUB_OUTPUT}"
+              echo "✓ Prowler v5 detected - tags: latest, stable"
+              ;;
+            *)
+              echo "::error::Unsupported Prowler major version: ${PROWLER_VERSION_MAJOR}"
+              exit 1
+              ;;
+          esac

   notify-release-started:
     if: github.repository == 'prowler-cloud/prowler' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
```
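The case statement above encodes a fixed mapping from Prowler major version to Docker tag pair. A hedged Python sketch of that mapping (not part of the diff), useful for seeing the three branches at a glance:

```python
TAGS_BY_MAJOR = {
    "3": ("v3-latest", "v3-stable"),
    "4": ("v4-latest", "v4-stable"),
    "5": ("latest", "stable"),
}

def docker_tags(prowler_version: str) -> tuple[str, str]:
    """Map a Prowler version like '5.25.0' to its (latest, stable) Docker tags."""
    major = prowler_version.split(".", 1)[0]
    try:
        return TAGS_BY_MAJOR[major]
    except KeyError:
        raise ValueError(f"Unsupported Prowler major version: {major}") from None

assert docker_tags("5.25.0") == ("latest", "stable")
assert docker_tags("3.2.0") == ("v3-latest", "v3-stable")
```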
```diff
@@ -206,7 +228,7 @@ jobs:
           tags: |
             ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-${{ matrix.arch }}
           cache-from: type=gha,scope=${{ matrix.arch }}
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

   # Create and push multi-architecture manifest
   create-manifest:
```
```diff
@@ -364,3 +386,39 @@ jobs:
           payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
           step-outcome: ${{ steps.outcome.outputs.outcome }}
           update-ts: ${{ needs.notify-release-started.outputs.message-ts }}
+
+  dispatch-v3-deployment:
+    needs: [setup, container-build-push]
+    if: always() && needs.setup.outputs.prowler_version_major == '3' && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
+    runs-on: ubuntu-latest
+    timeout-minutes: 5
+    permissions:
+      contents: read
+
+    steps:
+      - name: Harden the runner (Audit all outbound calls)
+        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
+        with:
+          egress-policy: audit
+
+      - name: Calculate short SHA
+        id: short-sha
+        run: echo "short_sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
+
+      - name: Dispatch v3 deployment (latest)
+        if: github.event_name == 'push'
+        uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
+        with:
+          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
+          repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}
+          event-type: dispatch
+          client-payload: '{"version":"v3-latest","tag":"${{ steps.short-sha.outputs.short_sha }}"}'
+
+      - name: Dispatch v3 deployment (release)
+        if: github.event_name == 'release'
+        uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
+        with:
+          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
+          repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}
+          event-type: dispatch
+          client-payload: '{"version":"release","tag":"${{ needs.setup.outputs.prowler_version }}"}'
```
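The added job relies on a `repository_dispatch` event carrying a small JSON payload. For orientation, a hedged sketch of the underlying GitHub REST call the action wraps (owner, repo, and token are placeholders, not from the diff):

```python
import json
import urllib.request

# repository_dispatch payload, shaped like the client-payload strings above.
payload = {
    "event_type": "dispatch",
    "client_payload": {"version": "v3-latest", "tag": "abcdef0"},
}
req = urllib.request.Request(
    "https://api.github.com/repos/OWNER/REPO/dispatches",  # placeholder repo
    data=json.dumps(payload).encode(),
    headers={
        "Authorization": "Bearer <token>",                 # placeholder credential
        "Accept": "application/vnd.github+json",
    },
    method="POST",
)
urllib.request.urlopen(req)  # GitHub responds 204 No Content on success
```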
```diff
@@ -5,22 +5,10 @@ on:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'prowler/**'
-      - 'Dockerfile*'
-      - 'pyproject.toml'
-      - 'poetry.lock'
-      - '.github/workflows/sdk-container-checks.yml'
   pull_request:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'prowler/**'
-      - 'Dockerfile*'
-      - 'pyproject.toml'
-      - 'poetry.lock'
-      - '.github/workflows/sdk-container-checks.yml'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
```
```diff
@@ -68,7 +56,16 @@ jobs:

   sdk-container-build-and-scan:
     if: github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        include:
+          - platform: linux/amd64
+            runner: ubuntu-latest
+            arch: amd64
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+            arch: arm64
     timeout-minutes: 30
     permissions:
       contents: read
```
```diff
@@ -135,22 +132,23 @@ jobs:
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

-      - name: Build SDK container
+      - name: Build SDK container for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
         with:
           context: .
           push: false
           load: true
-          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}
+          platforms: ${{ matrix.platform }}
+          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

-      - name: Scan SDK container with Trivy
+      - name: Scan SDK container with Trivy for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: ./.github/actions/trivy-scan
         with:
           image-name: ${{ env.IMAGE_NAME }}
-          image-tag: ${{ github.sha }}
+          image-tag: ${{ github.sha }}-${{ matrix.arch }}
           fail-on-critical: 'false'
           severity: 'CRITICAL'
```
```diff
@@ -5,26 +5,10 @@ on:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'prowler/**'
-      - 'tests/**'
-      - 'pyproject.toml'
-      - 'poetry.lock'
-      - '.github/workflows/sdk-tests.yml'
-      - '.github/workflows/sdk-security.yml'
-      - '.github/actions/setup-python-poetry/**'
   pull_request:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'prowler/**'
-      - 'tests/**'
-      - 'pyproject.toml'
-      - 'poetry.lock'
-      - '.github/workflows/sdk-tests.yml'
-      - '.github/workflows/sdk-security.yml'
-      - '.github/actions/setup-python-poetry/**'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
```
```diff
@@ -151,7 +151,7 @@ jobs:
           tags: |
             ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
           cache-from: type=gha,scope=${{ matrix.arch }}
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }},scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

   # Create and push multi-architecture manifest
   create-manifest:
```
```diff
@@ -5,16 +5,10 @@ on:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'ui/**'
-      - '.github/workflows/ui-container-checks.yml'
   pull_request:
     branches:
       - 'master'
       - 'v5.*'
-    paths:
-      - 'ui/**'
-      - '.github/workflows/ui-container-checks.yml'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
```
```diff
@@ -63,7 +57,16 @@ jobs:

   ui-container-build-and-scan:
     if: github.repository == 'prowler-cloud/prowler'
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        include:
+          - platform: linux/amd64
+            runner: ubuntu-latest
+            arch: amd64
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+            arch: arm64
     timeout-minutes: 30
     permissions:
       contents: read
```
```diff
@@ -111,7 +114,7 @@ jobs:
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0

-      - name: Build UI container
+      - name: Build UI container for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
         with:
```
```diff
@@ -119,17 +122,18 @@ jobs:
           target: prod
           push: false
           load: true
-          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
-          cache-from: type=gha
-          cache-to: type=gha,mode=${{ github.event_name == 'pull_request' && 'min' || 'max' }}
+          platforms: ${{ matrix.platform }}
+          tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
+          cache-from: type=gha,scope=${{ matrix.arch }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
           build-args: |
             NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX

-      - name: Scan UI container with Trivy
+      - name: Scan UI container with Trivy for ${{ matrix.arch }}
         if: steps.check-changes.outputs.any_changed == 'true'
         uses: ./.github/actions/trivy-scan
         with:
           image-name: ${{ env.IMAGE_NAME }}
-          image-tag: ${{ github.sha }}
+          image-tag: ${{ github.sha }}-${{ matrix.arch }}
           fail-on-critical: 'false'
           severity: 'CRITICAL'
```
```diff
@@ -15,10 +15,6 @@ on:
-      - 'ui/**'
-      - 'api/**' # API changes can affect UI E2E

 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true

 permissions: {}

 jobs:
```
```diff
@@ -164,7 +160,7 @@ jobs:
         '

       - name: Setup Node.js
-        uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
         with:
           node-version: '24.13.0'
```
```diff
@@ -270,7 +266,7 @@ jobs:
         with:
           name: playwright-report
           path: ui/playwright-report/
-          retention-days: 7
+          retention-days: 30

       - name: Cleanup services
         if: always()
```
```diff
@@ -92,7 +92,7 @@ jobs:

       - name: Setup Node.js ${{ env.NODE_VERSION }}
         if: steps.check-changes.outputs.any_changed == 'true'
-        uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
+        uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
         with:
           node-version: ${{ env.NODE_VERSION }}
```
```diff
@@ -8,7 +8,6 @@ rules:
   - docs-bump-version.yml
   - issue-triage.lock.yml
   - mcp-container-build-push.yml
-  - nightly-arm64-container-builds.yml
   - pr-merged.yml
   - prepare-release.yml
   - sdk-bump-version.yml
```
+19 -4

```diff
@@ -6,7 +6,7 @@
 # P40 — security scanners
 # P50 — dependency validation

-default_install_hook_types: [pre-commit]
+default_install_hook_types: [pre-commit, pre-push]

 repos:
   ## GENERAL (prek built-in — no external repo needed)
```
```diff
@@ -62,7 +62,12 @@ repos:
       - id: autoflake
         name: "SDK - autoflake"
         files: { glob: ["{prowler,tests,dashboard,util,scripts}/**/*.py"] }
-        args: ["--in-place", "--remove-all-unused-imports", "--remove-unused-variable"]
+        args:
+          [
+            "--in-place",
+            "--remove-all-unused-imports",
+            "--remove-unused-variable",
+          ]
         priority: 20

   - repo: https://github.com/pycqa/isort
```
```diff
@@ -174,7 +179,8 @@ repos:
         language: system
         types: [python]
         files: '.*\.py'
-        exclude: { glob: ["{contrib,skills}/**", "**/.venv/**", "**/*_test.py"] }
+        exclude:
+          { glob: ["{contrib,skills}/**", "**/.venv/**", "**/*_test.py"] }
         priority: 40

       - id: safety
```
```diff
@@ -184,7 +190,16 @@ repos:
         entry: safety check --policy-file .safety-policy.yml
         language: system
         pass_filenames: false
-        files: { glob: ["**/pyproject.toml", "**/poetry.lock", "**/requirements*.txt", ".safety-policy.yml"] }
+        files:
+          {
+            glob:
+              [
+                "**/pyproject.toml",
+                "**/poetry.lock",
+                "**/requirements*.txt",
+                ".safety-policy.yml",
+              ],
+          }
         priority: 40

       - id: vulture
```
+1 -1

```diff
@@ -6,7 +6,7 @@ LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
 ARG POWERSHELL_VERSION=7.5.0
 ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}

-ARG TRIVY_VERSION=0.70.0
+ARG TRIVY_VERSION=0.69.2
 ENV TRIVY_VERSION=${TRIVY_VERSION}

 ARG ZIZMOR_VERSION=1.24.1
```
```diff
@@ -7,13 +7,6 @@ All notable changes to the **Prowler API** are documented in this file.
 ### 🚀 Added

-- New `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)
-- ASD Essential Eight (AWS) compliance framework support [(#10982)](https://github.com/prowler-cloud/prowler/pull/10982)
+- `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)
-- `GET /resources/{id}/events` now supports `Accept: text/plain` for LLM consumption [(#XXXXX)](https://github.com/prowler-cloud/prowler/pull/XXXXX)

-### 🔐 Security
-
-- `trivy` binary from 0.69.2 to 0.70.0 and `cryptography` from 46.0.6 to 46.0.7 (transitive via prowler SDK) in the API image for CVE-2026-33186 and CVE-2026-39892 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)

 ---
```
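The removed entry above references a plaintext rendering of `GET /resources/{id}/events` selected via content negotiation. As a hedged sketch only (base URL, resource id, and token are placeholders, not from the diff), a client would request it like this:

```python
import urllib.request

# Hypothetical deployment URL and resource id; the diff only defines the path shape.
url = "https://api.example.com/api/v1/resources/RESOURCE_ID/events"
req = urllib.request.Request(url, headers={
    "Accept": "text/plain",              # ask for the LLM-friendly markdown rendering
    "Authorization": "Bearer <token>",   # placeholder credential
})
with urllib.request.urlopen(req) as resp:
    print(resp.read().decode())
```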
+1 -1

```diff
@@ -5,7 +5,7 @@ LABEL maintainer="https://github.com/prowler-cloud/api"
 ARG POWERSHELL_VERSION=7.5.0
 ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}

-ARG TRIVY_VERSION=0.70.0
+ARG TRIVY_VERSION=0.69.2
 ENV TRIVY_VERSION=${TRIVY_VERSION}

 ARG ZIZMOR_VERSION=1.24.1
```
Generated +61 -61

```diff
@@ -2504,61 +2504,61 @@ dev = ["bandit", "coverage", "flake8", "pydocstyle", "pylint", "pytest", "pytest

 [[package]]
 name = "cryptography"
-version = "46.0.7"
+version = "46.0.6"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = "!=3.9.0,!=3.9.1,>=3.8"
 groups = ["main", "dev"]
 files = [
-    {file = "cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb"},
-    {file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b"},
-    {file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85"},
-    {file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e"},
-    {file = "cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457"},
-    {file = "cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b"},
-    {file = "cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1"},
-    {file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2"},
-    {file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e"},
-    {file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee"},
-    {file = "cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298"},
-    {file = "cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb"},
-    {file = "cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006"},
-    {file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0"},
-    {file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85"},
-    {file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e"},
-    {file = "cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246"},
-    {file = "cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3"},
-    {file = "cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f"},
-    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15"},
-    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455"},
-    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65"},
-    {file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968"},
-    {file = "cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4"},
-    {file = "cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5"},
+    {file = "cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19"},
+    {file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738"},
+    {file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c"},
+    {file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f"},
+    {file = "cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2"},
+    {file = "cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124"},
+    {file = "cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4"},
+    {file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a"},
+    {file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d"},
+    {file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736"},
+    {file = "cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed"},
+    {file = "cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4"},
+    {file = "cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa"},
+    {file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58"},
+    {file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb"},
+    {file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72"},
+    {file = "cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c"},
+    {file = "cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f"},
+    {file = "cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead"},
+    {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8"},
+    {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0"},
+    {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b"},
+    {file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a"},
+    {file = "cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e"},
+    {file = "cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759"},
 ]

 [package.dependencies]
```
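Every `hash = "sha256:..."` value above pins an exact artifact so installs fail on tampered or substituted files. A hedged sketch (not from the lock file itself) of how such a digest is computed for a downloaded wheel:

```python
import hashlib

def sha256_digest(path: str) -> str:
    """Hex sha256, comparable to the 'sha256:...' values pinned in poetry.lock."""
    h = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 16), b""):  # stream in 64 KiB chunks
            h.update(chunk)
    return h.hexdigest()

# Usage: sha256_digest("cryptography-46.0.6-cp311-abi3-win32.whl")
```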
```diff
@@ -2571,7 +2571,7 @@ nox = ["nox[uv] (>=2024.4.15)"]
 pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
 sdist = ["build (>=1.0.0)"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi (>=2024)", "cryptography-vectors (==46.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==46.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
 test-randomorder = ["pytest-randomly"]

 [[package]]
```
```diff
@@ -6665,7 +6665,7 @@ files = [

 [[package]]
 name = "prowler"
-version = "5.26.0"
+version = "5.25.0"
 description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
 optional = false
 python-versions = ">=3.10,<3.13"
```
```diff
@@ -6720,7 +6720,7 @@ boto3 = "1.40.61"
 botocore = "1.40.61"
 cloudflare = "4.3.1"
 colorama = "0.4.6"
-cryptography = "46.0.7"
+cryptography = "46.0.6"
 dash = "3.1.1"
 dash-bootstrap-components = "2.0.3"
 defusedxml = "0.7.1"
```
```diff
@@ -6755,7 +6755,7 @@ uuid6 = "2024.7.10"
 type = "git"
 url = "https://github.com/prowler-cloud/prowler.git"
 reference = "master"
-resolved_reference = "16798e293da365965120961e6539e3a9756564f9"
+resolved_reference = "ca29e354b622198ff6a70e2ea5eb04e4a44a0903"

 [[package]]
 name = "psutil"
```
```diff
@@ -7912,26 +7912,26 @@ shaping = ["uharfbuzz"]

 [[package]]
 name = "requests"
-version = "2.33.1"
+version = "2.32.5"
 description = "Python HTTP for Humans."
 optional = false
-python-versions = ">=3.10"
+python-versions = ">=3.9"
 groups = ["main", "dev"]
 files = [
-    {file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"},
-    {file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"},
+    {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"},
+    {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"},
 ]

 [package.dependencies]
-certifi = ">=2023.5.7"
+certifi = ">=2017.4.17"
 charset_normalizer = ">=2,<4"
 idna = ">=2.5,<4"
 PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""}
-urllib3 = ">=1.26,<3"
+urllib3 = ">=1.21.1,<3"

 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

 [[package]]
 name = "requests-file"
```
+1 -1

```diff
@@ -63,7 +63,6 @@ docker = "7.1.0"
 filelock = "3.20.3"
 freezegun = "1.5.1"
 mypy = "1.10.1"
-prek = "0.3.9"
 pylint = "3.2.5"
 pytest = "9.0.3"
 pytest-cov = "5.0.0"
@@ -75,3 +74,4 @@ ruff = "0.5.0"
 safety = "3.7.0"
 tqdm = "4.67.1"
 vulture = "2.14"
+prek = "0.3.9"
```
@@ -1,141 +0,0 @@
"""Helpers for serializing resource timeline events into LLM-friendly formats.

The text renderer is a 1:1 markdown projection of what the JSON endpoint
returns: same events, same order, same fields. We do not infer sessions or
relationships between events — grouping is left to the consumer.
"""

from datetime import datetime, timezone
from typing import Any, Iterable

# Truncation thresholds for payload values. Strings longer than this are
# clipped with an ellipsis; lists/dicts larger than this collapse to a count
# placeholder. The goal is to bound a single event's token cost without
# losing the API call's identity.
MAX_STRING_LEN = 200
MAX_LIST_INLINE = 5
MAX_DICT_INLINE = 8


def serialize_events_as_text(
    events: Iterable[dict[str, Any]],
    resource: Any,
    lookback_days: int,
    write_events_only: bool,
) -> str:
    """Render resource events as a flat markdown list of what the API returns."""
    events = list(events)
    lines: list[str] = []

    lines.append("# Resource Events")
    lines.append(f"- Resource: {getattr(resource, 'uid', '')}")
    lines.append(f"- Region: {getattr(resource, 'region', '') or 'global'}")
    lines.append(f"- Lookback: {lookback_days} days")
    lines.append(f"- Write events only: {str(write_events_only).lower()}")
    lines.append(f"- Events: {len(events)}")
    lines.append("")

    if not events:
        lines.append("No events recorded in the lookback window.")
        return "\n".join(lines) + "\n"

    lines.append("## Events")
    lines.append("")

    for index, event in enumerate(events, 1):
        lines.extend(_format_event(index, event))
        lines.append("")

    return "\n".join(lines).rstrip() + "\n"


def _format_event(index: int, event: dict[str, Any]) -> list[str]:
    when = _format_time(_event_time(event))
    name = event.get("event_name") or "Unknown"
    source = event.get("event_source") or "unknown"
    error_code = event.get("error_code")
    status = f"ERROR({error_code})" if error_code else "ok"

    lines = [f"### {index}. {name} at {when}"]
    lines.append(f"- Source: {source}")
    lines.append(f"- Status: {status}")

    if event.get("actor"):
        lines.append(f"- Actor: {event['actor']}")
    if event.get("actor_type"):
        lines.append(f"- Actor type: {event['actor_type']}")
    if event.get("actor_uid"):
        lines.append(f"- Actor ARN: {event['actor_uid']}")
    if event.get("source_ip_address"):
        lines.append(f"- Source IP: {event['source_ip_address']}")
    if event.get("user_agent"):
        lines.append(f"- User agent: {event['user_agent']}")

    request = _format_payload(event.get("request_data"))
    if request:
        lines.append(f"- Request: {request}")

    response = _format_payload(event.get("response_data"))
    if response:
        lines.append(f"- Response: {response}")

    if error_code and event.get("error_message"):
        lines.append(f"- Error: {event['error_message']}")

    if event.get("event_id"):
        lines.append(f"- Event ID: {event['event_id']}")

    return lines


def _format_payload(payload: Any) -> str:
    if not isinstance(payload, dict) or not payload:
        return ""
    return (
        "{" + ", ".join(f"{k}: {_summarize_value(v)}" for k, v in payload.items()) + "}"
    )


def _summarize_value(value: Any) -> str:
    if isinstance(value, str):
        if len(value) <= MAX_STRING_LEN:
            return f'"{value}"'
        return f'"{value[: MAX_STRING_LEN - 3]}..."'
    if isinstance(value, bool):
        return "true" if value else "false"
    if value is None:
        return "null"
    if isinstance(value, (int, float)):
        return str(value)
    if isinstance(value, (list, tuple)):
        if len(value) > MAX_LIST_INLINE:
            return f"[{len(value)} items]"
        return "[" + ", ".join(_summarize_value(v) for v in value) + "]"
    if isinstance(value, dict):
        if len(value) > MAX_DICT_INLINE:
            return f"{{{len(value)} keys}}"
        return (
            "{"
            + ", ".join(f"{k}: {_summarize_value(v)}" for k, v in value.items())
            + "}"
        )
    return str(value)


def _event_time(event: dict[str, Any]) -> datetime:
    value = event.get("event_time")
    if isinstance(value, datetime):
        return value if value.tzinfo else value.replace(tzinfo=timezone.utc)
    if isinstance(value, str):
        try:
            parsed = datetime.fromisoformat(value.replace("Z", "+00:00"))
            return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)
        except ValueError:
            return datetime.min.replace(tzinfo=timezone.utc)
    return datetime.min.replace(tzinfo=timezone.utc)


def _format_time(value: datetime) -> str:
    if value.tzinfo is None:
        value = value.replace(tzinfo=timezone.utc)
    return value.strftime("%Y-%m-%dT%H:%M:%SZ")
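
A quick usage sketch of the renderer above (the event data is hypothetical; it assumes the module is importable as `api.events.views_helpers`):

```python
from datetime import datetime, timezone
from types import SimpleNamespace

from api.events.views_helpers import serialize_events_as_text

# Minimal fake inputs, just to show the markdown shape the renderer produces.
events = [
    {
        "event_name": "PutBucketPolicy",
        "event_source": "s3.amazonaws.com",
        "event_time": datetime(2026, 5, 4, 16, 55, 1, tzinfo=timezone.utc),
    }
]
resource = SimpleNamespace(uid="arn:aws:s3:::example-bucket", region="us-east-1")

text = serialize_events_as_text(
    events, resource=resource, lookback_days=90, write_events_only=True
)
print(text)  # "# Resource Events" header, then "### 1. PutBucketPolicy at 2026-05-04T16:55:01Z"
```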
@@ -1,366 +0,0 @@
"""Unit tests for api.events.views_helpers.

These tests exercise the text-renderer in isolation: no Django, no DRF, no DB.
The behavior under test is the markdown shape, payload sanitization, and
truncation rules.
"""

from datetime import datetime, timezone
from types import SimpleNamespace

import pytest

from api.events import views_helpers


@pytest.fixture
def resource():
    return SimpleNamespace(
        uid="arn:aws:s3:::acme-prod-data",
        region="us-east-1",
    )


def _event(**overrides):
    base = {
        "event_id": "evt-1",
        "event_time": datetime(2026, 5, 4, 16, 55, 1, tzinfo=timezone.utc),
        "event_name": "PutBucketPolicy",
        "event_source": "s3.amazonaws.com",
        "actor": "assumed-role/AdminRole/alice",
        "actor_uid": "arn:aws:sts::123:assumed-role/AdminRole/alice",
        "actor_type": "AssumedRole",
        "source_ip_address": "1.2.3.4",
        "user_agent": "aws-cli/2.15.30",
        "request_data": None,
        "response_data": None,
        "error_code": None,
        "error_message": None,
    }
    base.update(overrides)
    return base


class TestSerializeEventsAsTextHeader:
    def test_empty_events_renders_header_and_no_events_marker(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert text.startswith("# Resource Events\n")
        assert "- Resource: arn:aws:s3:::acme-prod-data" in text
        assert "- Region: us-east-1" in text
        assert "- Lookback: 90 days" in text
        assert "- Write events only: true" in text
        assert "- Events: 0" in text
        assert "No events recorded in the lookback window." in text
        # No "## Events" section when empty
        assert "## Events" not in text

    def test_missing_region_renders_global(self, resource):
        resource.region = ""

        text = views_helpers.serialize_events_as_text(
            events=[], resource=resource, lookback_days=7, write_events_only=False
        )

        assert "- Region: global" in text
        assert "- Write events only: false" in text
        assert "- Lookback: 7 days" in text

    def test_resource_without_uid_attribute_renders_blank(self):
        text = views_helpers.serialize_events_as_text(
            events=[],
            resource=SimpleNamespace(),
            lookback_days=1,
            write_events_only=True,
        )

        # getattr defaults to "" for both fields, no crash.
        assert "- Resource: \n" in text
        assert "- Region: global" in text


class TestSerializeEventsAsTextBody:
    def test_single_event_renders_all_present_fields(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[_event()],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "## Events" in text
        assert "### 1. PutBucketPolicy at 2026-05-04T16:55:01Z" in text
        assert "- Source: s3.amazonaws.com" in text
        assert "- Status: ok" in text
        assert "- Actor: assumed-role/AdminRole/alice" in text
        assert "- Actor type: AssumedRole" in text
        assert "- Actor ARN: arn:aws:sts::123:assumed-role/AdminRole/alice" in text
        assert "- Source IP: 1.2.3.4" in text
        assert "- User agent: aws-cli/2.15.30" in text
        assert "- Event ID: evt-1" in text

    def test_optional_fields_are_omitted_when_absent(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(
                    actor_type=None,
                    actor_uid=None,
                    source_ip_address=None,
                    user_agent=None,
                )
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "- Actor type:" not in text
        assert "- Actor ARN:" not in text
        assert "- Source IP:" not in text
        assert "- User agent:" not in text
        # Required field still rendered
        assert "- Actor: assumed-role/AdminRole/alice" in text

    def test_error_event_renders_error_code_and_message(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(
                    error_code="AccessDenied",
                    error_message=(
                        "User: arn:aws:sts::123:assumed-role/AdminRole/alice "
                        "is not authorized to perform: s3:PutBucketAcl"
                    ),
                )
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "- Status: ERROR(AccessDenied)" in text
        assert "- Error: User: arn:aws:sts::123:assumed-role/AdminRole/alice" in text

    def test_error_message_omitted_when_no_error_code(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(
                    error_code=None,
                    error_message="orphaned message that should be ignored",
                )
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "- Status: ok" in text
        assert "orphaned message" not in text

    def test_event_order_is_preserved_no_sorting(self, resource):
        # API returns CloudTrail events in its native order; the renderer
        # must NOT re-sort them.
        first = _event(
            event_id="newest",
            event_name="GetBucketPolicy",
            event_time=datetime(2026, 5, 4, 17, 2, 11, tzinfo=timezone.utc),
        )
        second = _event(
            event_id="middle",
            event_name="PutBucketAcl",
            event_time=datetime(2026, 5, 4, 16, 58, 33, tzinfo=timezone.utc),
        )
        third = _event(
            event_id="oldest",
            event_name="PutBucketPolicy",
            event_time=datetime(2026, 5, 4, 16, 55, 1, tzinfo=timezone.utc),
        )

        text = views_helpers.serialize_events_as_text(
            events=[first, second, third],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        idx_first = text.index("### 1. GetBucketPolicy")
        idx_second = text.index("### 2. PutBucketAcl")
        idx_third = text.index("### 3. PutBucketPolicy")
        assert idx_first < idx_second < idx_third

    def test_event_count_in_header_matches_body(self, resource):
        events = [_event(event_id=f"e{i}") for i in range(3)]
        text = views_helpers.serialize_events_as_text(
            events=events,
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "- Events: 3" in text
        assert text.count("### ") == 3


class TestPayloadFormatting:
    def test_request_data_renders_inline(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(
                    request_data={
                        "bucketName": "acme-prod-data",
                        "encrypted": True,
                    }
                )
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert '- Request: {bucketName: "acme-prod-data", encrypted: true}' in text

    def test_request_data_empty_dict_omits_line(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[_event(request_data={})],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "- Request:" not in text

    def test_response_data_renders_when_present(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(
                    response_data={"versionId": "abc123"},
                )
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert '- Response: {versionId: "abc123"}' in text

    def test_long_strings_are_truncated(self, resource):
        long_policy = "x" * 500
        text = views_helpers.serialize_events_as_text(
            events=[_event(request_data={"policy": long_policy})],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        # 200-char threshold, with "..." marker on truncation
        assert "..." in text
        # The full 500-char value must NOT be present
        assert long_policy not in text

    def test_large_list_summarized_as_count(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[_event(request_data={"tags": list(range(20))})],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "tags: [20 items]" in text

    def test_small_list_renders_inline(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[_event(request_data={"ports": [80, 443]})],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "ports: [80, 443]" in text

    def test_large_dict_summarized_as_count(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(
                    request_data={
                        "config": {f"key{i}": i for i in range(15)},
                    }
                )
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "config: {15 keys}" in text

    def test_bool_and_none_values_lowercased(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(
                    request_data={
                        "publicAccess": True,
                        "encryption": False,
                        "kmsKey": None,
                    }
                )
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "publicAccess: true" in text
        assert "encryption: false" in text
        assert "kmsKey: null" in text

    def test_request_data_non_dict_is_ignored(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[_event(request_data="not a dict")],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "- Request:" not in text


class TestTimeFormatting:
    def test_event_time_as_naive_datetime_is_treated_as_utc(self, resource):
        # Defensive: providers occasionally hand back naive datetimes;
        # they must be normalized rather than crashing the renderer.
        text = views_helpers.serialize_events_as_text(
            events=[
                _event(event_time=datetime(2026, 5, 4, 16, 55, 1)),
            ],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "### 1. PutBucketPolicy at 2026-05-04T16:55:01Z" in text

    def test_event_time_as_iso_string_is_parsed(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[_event(event_time="2026-05-04T16:55:01Z")],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        assert "### 1. PutBucketPolicy at 2026-05-04T16:55:01Z" in text

    def test_unparseable_event_time_does_not_crash(self, resource):
        text = views_helpers.serialize_events_as_text(
            events=[_event(event_time="garbage")],
            resource=resource,
            lookback_days=90,
            write_events_only=True,
        )

        # Falls back to datetime.min — exact value isn't important, but
        # the renderer must not raise.
        assert "### 1. PutBucketPolicy at " in text
@@ -1469,9 +1469,9 @@ class TestProviderViewSet:

        included_data = response.json()["included"]
        for expected_type in expected_resources:
            assert any(d.get("type") == expected_type for d in included_data), (
                f"Expected type '{expected_type}' not found in included data"
            )
            assert any(
                d.get("type") == expected_type for d in included_data
            ), f"Expected type '{expected_type}' not found in included data"

    def test_providers_retrieve(self, authenticated_client, providers_fixture):
        provider1, *_ = providers_fixture
@@ -5468,13 +5468,13 @@ class TestAttackPathsScanViewSet:
                content_type=API_JSON_CONTENT_TYPE,
            )
            if i < 10:
                assert response.status_code == status.HTTP_200_OK, (
                    f"Request {i + 1} should succeed with 200 OK, got {response.status_code}"
                )
                assert (
                    response.status_code == status.HTTP_200_OK
                ), f"Request {i + 1} should succeed with 200 OK, got {response.status_code}"
            else:
                assert response.status_code == status.HTTP_429_TOO_MANY_REQUESTS, (
                    f"Request {i + 1} should be throttled"
                )
                assert (
                    response.status_code == status.HTTP_429_TOO_MANY_REQUESTS
                ), f"Request {i + 1} should be throttled"

# -- Timeout simulation -------------------------------------------------------

@@ -5677,9 +5677,9 @@ class TestResourceViewSet:

        included_data = response.json()["included"]
        for expected_type in expected_resources:
            assert any(d.get("type") == expected_type for d in included_data), (
                f"Expected type '{expected_type}' not found in included data"
            )
            assert any(
                d.get("type") == expected_type for d in included_data
            ), f"Expected type '{expected_type}' not found in included data"

    @pytest.mark.parametrize(
        "filter_name, filter_value, expected_count",
@@ -6228,9 +6228,9 @@ class TestResourceViewSet:
            (e for e in errors if e["source"]["parameter"] == expected_invalid_param),
            None,
        )
        assert error is not None, (
            f"Expected error for parameter '{expected_invalid_param}'"
        )
        assert (
            error is not None
        ), f"Expected error for parameter '{expected_invalid_param}'"
        assert error["code"] == "invalid"
        assert error["status"] == "400"  # Must be string per JSON:API spec
        assert expected_invalid_param in error["detail"]
@@ -6762,187 +6762,16 @@ class TestResourceViewSet:
        # Test with completely malformed token
        client.credentials(HTTP_AUTHORIZATION="Bearer not.a.valid.jwt.token")
        response = client.get(reverse("resource-events", kwargs={"pk": resource.id}))
        assert response.status_code == status.HTTP_401_UNAUTHORIZED, (
            f"Expected 401 for malformed token but got {response.status_code}"
        )
        assert (
            response.status_code == status.HTTP_401_UNAUTHORIZED
        ), f"Expected 401 for malformed token but got {response.status_code}"

        # Test with empty bearer token
        client.credentials(HTTP_AUTHORIZATION="Bearer ")
        response = client.get(reverse("resource-events", kwargs={"pk": resource.id}))
        assert response.status_code == status.HTTP_401_UNAUTHORIZED, (
            f"Expected 401 for empty bearer token but got {response.status_code}"
        )

    @patch("api.v1.views.initialize_prowler_provider")
    @patch("api.v1.views.CloudTrailTimeline")
    def test_events_text_plain_renders_markdown(
        self,
        mock_cloudtrail_timeline,
        mock_initialize_provider,
        authenticated_client,
        providers_fixture,
    ):
        """`Accept: text/plain` returns a markdown report instead of JSON:API."""
        from api.models import Resource

        aws_provider = providers_fixture[0]

        resource = Resource.objects.create(
            uid="arn:aws:s3:::acme-prod-data",
            name="acme-prod-data",
            type="bucket",
            region="us-east-1",
            service="s3",
            provider=aws_provider,
            tenant_id=aws_provider.tenant_id,
        )

        mock_session = Mock()
        mock_provider = Mock()
        mock_provider._session.current_session = mock_session
        mock_initialize_provider.return_value = mock_provider

        mock_timeline_instance = Mock()
        mock_timeline_instance.get_resource_timeline.return_value = [
            {
                "event_id": "evt-1",
                "event_time": "2026-05-04T16:55:01Z",
                "event_name": "PutBucketPolicy",
                "event_source": "s3.amazonaws.com",
                "actor": "assumed-role/AdminRole/alice",
                "actor_uid": "arn:aws:sts::123:assumed-role/AdminRole/alice",
                "actor_type": "AssumedRole",
                "source_ip_address": "1.2.3.4",
                "user_agent": "aws-cli/2.15.30",
                "request_data": {"bucketName": "acme-prod-data"},
            },
            {
                "event_id": "evt-2",
                "event_time": "2026-05-04T16:58:33Z",
                "event_name": "PutBucketAcl",
                "event_source": "s3.amazonaws.com",
                "actor": "assumed-role/AdminRole/alice",
                "error_code": "AccessDenied",
                "error_message": "User not authorized",
            },
        ]
        mock_cloudtrail_timeline.return_value = mock_timeline_instance

        response = authenticated_client.get(
            reverse("resource-events", kwargs={"pk": resource.id}),
            HTTP_ACCEPT="text/plain",
        )

        assert response.status_code == status.HTTP_200_OK
        assert response["Content-Type"].startswith("text/plain")

        body = response.content.decode("utf-8")

        # Header
        assert body.startswith("# Resource Events\n")
        assert "- Resource: arn:aws:s3:::acme-prod-data" in body
        assert "- Region: us-east-1" in body
        assert "- Events: 2" in body
        # Body
        assert "### 1. PutBucketPolicy at 2026-05-04T16:55:01Z" in body
        assert "- Status: ok" in body
        assert "- Status: ERROR(AccessDenied)" in body
        assert "- Error: User not authorized" in body
        assert '- Request: {bucketName: "acme-prod-data"}' in body

    @patch("api.v1.views.initialize_prowler_provider")
    @patch("api.v1.views.CloudTrailTimeline")
    def test_events_default_accept_still_returns_json(
        self,
        mock_cloudtrail_timeline,
        mock_initialize_provider,
        authenticated_client,
        providers_fixture,
    ):
        """Adding text/plain renderer must not regress the default JSON:API path."""
        from api.models import Resource

        aws_provider = providers_fixture[0]
        resource = Resource.objects.create(
            uid="arn:aws:ec2:us-east-1:123456789012:instance/i-default-accept",
            name="Default Accept Instance",
            type="instance",
            region="us-east-1",
            service="ec2",
            provider=aws_provider,
            tenant_id=aws_provider.tenant_id,
        )

        mock_session = Mock()
        mock_provider = Mock()
        mock_provider._session.current_session = mock_session
        mock_initialize_provider.return_value = mock_provider

        mock_timeline_instance = Mock()
        mock_timeline_instance.get_resource_timeline.return_value = [
            {
                "event_id": "evt-json",
                "event_time": "2026-05-04T16:55:01Z",
                "event_name": "RunInstances",
                "event_source": "ec2.amazonaws.com",
                "actor": "user/alice",
            }
        ]
        mock_cloudtrail_timeline.return_value = mock_timeline_instance

        response = authenticated_client.get(
            reverse("resource-events", kwargs={"pk": resource.id})
        )

        assert response.status_code == status.HTTP_200_OK
        assert "json" in response["Content-Type"]
        payload = response.json()
        assert payload["data"][0]["type"] == "resource-events"
        assert payload["data"][0]["id"] == "evt-json"
        assert payload["data"][0]["attributes"]["event_name"] == "RunInstances"

    @patch("api.v1.views.initialize_prowler_provider")
    @patch("api.v1.views.CloudTrailTimeline")
    def test_events_text_plain_no_events_renders_empty_marker(
        self,
        mock_cloudtrail_timeline,
        mock_initialize_provider,
        authenticated_client,
        providers_fixture,
    ):
        """Empty timeline still produces a valid text response, not a 500."""
        from api.models import Resource

        aws_provider = providers_fixture[0]
        resource = Resource.objects.create(
            uid="arn:aws:ec2:us-east-1:123456789012:instance/i-empty",
            name="Empty Instance",
            type="instance",
            region="us-east-1",
            service="ec2",
            provider=aws_provider,
            tenant_id=aws_provider.tenant_id,
        )

        mock_session = Mock()
        mock_provider = Mock()
        mock_provider._session.current_session = mock_session
        mock_initialize_provider.return_value = mock_provider

        mock_timeline_instance = Mock()
        mock_timeline_instance.get_resource_timeline.return_value = []
        mock_cloudtrail_timeline.return_value = mock_timeline_instance

        response = authenticated_client.get(
            reverse("resource-events", kwargs={"pk": resource.id}),
            HTTP_ACCEPT="text/plain",
        )

        assert response.status_code == status.HTTP_200_OK
        assert response["Content-Type"].startswith("text/plain")
        body = response.content.decode("utf-8")
        assert "- Events: 0" in body
        assert "No events recorded in the lookback window." in body
        assert (
            response.status_code == status.HTTP_401_UNAUTHORIZED
        ), f"Expected 401 for empty bearer token but got {response.status_code}"


@pytest.mark.django_db
@@ -7003,9 +6832,9 @@ class TestFindingViewSet:

        included_data = response.json()["included"]
        for expected_type in expected_resources:
            assert any(d.get("type") == expected_type for d in included_data), (
                f"Expected type '{expected_type}' not found in included data"
            )
            assert any(
                d.get("type") == expected_type for d in included_data
            ), f"Expected type '{expected_type}' not found in included data"

    @pytest.mark.parametrize(
        "filter_name, filter_value, expected_count",
@@ -7544,9 +7373,9 @@ class TestJWTFields:
            reverse("token-obtain"), data, format="json"
        )

        assert response.status_code == status.HTTP_200_OK, (
            f"Unexpected status code: {response.status_code}"
        )
        assert (
            response.status_code == status.HTTP_200_OK
        ), f"Unexpected status code: {response.status_code}"

        access_token = response.data["attributes"]["access"]
        payload = jwt.decode(access_token, options={"verify_signature": False})
@@ -7560,23 +7389,23 @@
        # Verify expected fields
        for field in expected_fields:
            assert field in payload, f"The field '{field}' is not in the JWT"
            assert payload[field] == expected_fields[field], (
                f"The value of '{field}' does not match"
            )
            assert (
                payload[field] == expected_fields[field]
            ), f"The value of '{field}' does not match"

        # Verify time fields are integers
        for time_field in ["exp", "iat", "nbf"]:
            assert time_field in payload, f"The field '{time_field}' is not in the JWT"
            assert isinstance(payload[time_field], int), (
                f"The field '{time_field}' is not an integer"
            )
            assert isinstance(
                payload[time_field], int
            ), f"The field '{time_field}' is not an integer"

        # Verify identification fields are non-empty strings
        for id_field in ["jti", "sub", "tenant_id"]:
            assert id_field in payload, f"The field '{id_field}' is not in the JWT"
            assert isinstance(payload[id_field], str) and payload[id_field], (
                f"The field '{id_field}' is not a valid string"
            )
            assert (
                isinstance(payload[id_field], str) and payload[id_field]
            ), f"The field '{id_field}' is not a valid string"


@pytest.mark.django_db
@@ -11517,9 +11346,9 @@ class TestIntegrationViewSet:

        included_data = response.json()["included"]
        for expected_type in expected_resources:
            assert any(d.get("type") == expected_type for d in included_data), (
                f"Expected type '{expected_type}' not found in included data"
            )
            assert any(
                d.get("type") == expected_type for d in included_data
            ), f"Expected type '{expected_type}' not found in included data"

    @pytest.mark.parametrize(
        "integration_type, configuration, credentials",
@@ -12956,9 +12785,9 @@ class TestLighthouseConfigViewSet:
        )
        # Check that API key is masked with asterisks only
        masked_api_key = data["attributes"]["api_key"]
        assert all(c == "*" for c in masked_api_key), (
            "API key should contain only asterisks"
        )
        assert all(
            c == "*" for c in masked_api_key
        ), "API key should contain only asterisks"

    @pytest.mark.parametrize(
        "field_name, invalid_value",
@@ -16703,9 +16532,9 @@ class TestFindingGroupViewSet:
        assert len(data) == 2
        for item in data:
            resource = item["attributes"]["resource"]
            assert resource["resource_group"] == "storage", (
                "resource_group must be 'storage'"
            )
            assert (
                resource["resource_group"] == "storage"
            ), "resource_group must be 'storage'"

    def test_resources_name_icontains(
        self, authenticated_client, finding_groups_fixture
@@ -17019,12 +16848,12 @@ class TestFindingGroupViewSet:
        assert response_p1.status_code == status.HTTP_200_OK
        p1_check_ids = {item["id"] for item in response_p1.json()["data"]}
        # Provider1 has scan1 with 4 checks
        assert len(p1_check_ids) == 4, (
            f"Provider1 should have 4 checks, got {len(p1_check_ids)}"
        )
        assert "cloudtrail_enabled" not in p1_check_ids, (
            "cloudtrail_enabled should NOT be in provider1"
        )
        assert (
            len(p1_check_ids) == 4
        ), f"Provider1 should have 4 checks, got {len(p1_check_ids)}"
        assert (
            "cloudtrail_enabled" not in p1_check_ids
        ), "cloudtrail_enabled should NOT be in provider1"

        # Get finding groups for provider2 only
        response_p2 = authenticated_client.get(
@@ -17034,12 +16863,12 @@ class TestFindingGroupViewSet:
        assert response_p2.status_code == status.HTTP_200_OK
        p2_check_ids = {item["id"] for item in response_p2.json()["data"]}
        # Provider2 has scan2 with 1 check
        assert len(p2_check_ids) == 1, (
            f"Provider2 should have 1 check, got {len(p2_check_ids)}"
        )
        assert "cloudtrail_enabled" in p2_check_ids, (
            "cloudtrail_enabled should be in provider2"
        )
        assert (
            len(p2_check_ids) == 1
        ), f"Provider2 should have 1 check, got {len(p2_check_ids)}"
        assert (
            "cloudtrail_enabled" in p2_check_ids
        ), "cloudtrail_enabled should be in provider2"

    # Test provider_type filter actually filters data
    def test_finding_groups_provider_type_filter_actually_filters(
@@ -17062,9 +16891,9 @@ class TestFindingGroupViewSet:
            {"filter[inserted_at]": TODAY, "filter[provider_type]": "gcp"},
        )
        assert response_gcp.status_code == status.HTTP_200_OK
        assert len(response_gcp.json()["data"]) == 0, (
            "GCP filter should return 0 results"
        )
        assert (
            len(response_gcp.json()["data"]) == 0
        ), "GCP filter should return 0 results"

    def test_finding_groups_pagination(
        self, authenticated_client, finding_groups_fixture
@@ -109,7 +109,6 @@ from tasks.tasks import (
from api.attack_paths import database as graph_database
from api.attack_paths import get_queries_for_provider, get_query_by_id
from api.attack_paths import views_helpers as attack_paths_views_helpers
from api.events import views_helpers as events_views_helpers
from api.base_views import BaseRLSViewSet, BaseTenantViewset, BaseUserViewset
from api.compliance import (
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
@@ -3391,9 +3390,6 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
        description=(
            "Retrieve events showing modification history for a resource. "
            "Returns who modified the resource and when. Currently only available for AWS resources.\n\n"
            "**Content negotiation:** send `Accept: text/plain` to receive a "
            "compact markdown report optimized for LLM consumption "
            "instead of the default JSON:API document.\n\n"
            "**Note:** Some events may not appear due to CloudTrail indexing limitations. "
            "Not all AWS API calls record the resource identifier in a searchable format."
        ),
@@ -3441,7 +3437,6 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
        methods=["get"],
        url_name="events",
        filter_backends=[],  # Disable filters - we're calling external API, not filtering queryset
        renderer_classes=[APIJSONRenderer, PlainTextRenderer],
    )
    def events(self, request, pk=None):
        """Get events for a resource."""
@@ -3574,15 +3569,6 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
            resource_uid=resource.uid,
        )

        if isinstance(request.accepted_renderer, PlainTextRenderer):
            text = events_views_helpers.serialize_events_as_text(
                events,
                resource=resource,
                lookback_days=lookback_days,
                write_events_only=not include_read_events,
            )
            return Response(text)

        serializer = ResourceEventSerializer(events, many=True)
        return Response(serializer.data)
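
For reference, a minimal client-side sketch of the content negotiation described in the endpoint docstring above; the base URL, token, and resource ID are hypothetical placeholders:

```python
import requests

BASE_URL = "https://api.example.com/api/v1"  # placeholder, not a real deployment URL

# Send Accept: text/plain to get the markdown report; omit the header
# to receive the default JSON:API document instead.
response = requests.get(
    f"{BASE_URL}/resources/<resource-id>/events",
    headers={
        "Authorization": "Bearer <access-token>",
        "Accept": "text/plain",
    },
)
print(response.text)
```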
@@ -47,9 +47,6 @@ from prowler.lib.outputs.compliance.csa.csa_oraclecloud import OracleCloudCSA
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
from prowler.lib.outputs.compliance.asd_essential_eight.asd_essential_eight_aws import (
    ASDEssentialEightAWS,
)
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
@@ -103,7 +100,6 @@ COMPLIANCE_CLASS_MAP = {
        (lambda name: name.startswith("ccc_"), CCC_AWS),
        (lambda name: name.startswith("c5_"), AWSC5),
        (lambda name: name.startswith("csa_"), AWSCSA),
        (lambda name: name == "asd_essential_eight_aws", ASDEssentialEightAWS),
    ],
    "azure": [
        (lambda name: name.startswith("cis_"), AzureCIS),
@@ -202,9 +202,8 @@ def _get_attack_surface_mapping_from_provider(provider_type: str) -> dict:
            "iam_inline_policy_allows_privilege_escalation",
        },
        "ec2-imdsv1": {
            "ec2_instance_imdsv2_enabled",
            "ec2_instance_account_imdsv2_enabled",
        },  # AWS only - instance-level IMDSv1 exposure and account IMDS defaults
            "ec2_instance_imdsv2_enabled"
        },  # AWS only - IMDSv1 enabled findings
    }
    for category_name, check_ids in attack_surface_check_mappings.items():
        if check_ids is None:

@@ -3853,7 +3853,6 @@ class TestAggregateAttackSurface:
            in result["privilege-escalation"]
        )
        assert "ec2_instance_imdsv2_enabled" in result["ec2-imdsv1"]
        assert "ec2_instance_account_imdsv2_enabled" in result["ec2-imdsv1"]

    @patch("tasks.jobs.scan.AttackSurfaceOverview.objects.bulk_create")
    @patch("tasks.jobs.scan.Finding.all_objects.filter")
@@ -73,58 +73,6 @@ The best reference to understand how to implement a new service is following the
- AWS API calls are wrapped in try/except blocks, with specific handling for `ClientError` and generic exceptions, always logging errors.
- If ARN is not present for some resource, it can be constructed using string interpolation, always including partition, service, region, account, and resource ID.
- Tags and additional attributes that cannot be retrieved from the default call should be collected and stored for each resource using dedicated methods and threading, using the resource object list as the iterator.
- When accessing dictionary values from AWS API responses, always use `.get()` with a default value instead of direct dictionary access (e.g., `response.get("Policies", {})` instead of `response["Policies"]`). AWS API responses may not always include all keys, and direct access can cause `KeyError` exceptions that break the entire scan for that service. A short sketch of these conventions follows this list.
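
A minimal sketch of these conventions inside a fetch method; the service, API call, response keys, and `Widget` model are illustrative placeholders, not an actual Prowler service:

```python
from botocore.exceptions import ClientError

from prowler.lib.logger import logger


def _describe_widgets(self, regional_client):
    """Illustrative fetch method following the conventions above."""
    try:
        response = regional_client.describe_widgets()  # hypothetical API call
        for widget in response.get("Widgets", []):  # .get() avoids KeyError
            widget_id = widget.get("WidgetId", "")
            # Construct the ARN when the API does not return one, always
            # including partition, service, region, account, and resource ID.
            arn = (
                f"arn:{self.audited_partition}:widgets:{regional_client.region}:"
                f"{self.audited_account}:widget/{widget_id}"
            )
            self.widgets.append(Widget(id=widget_id, arn=arn))
    except ClientError as error:
        # Specific handling for AWS client errors, always logged.
        logger.error(
            f"{regional_client.region} -- {error.__class__.__name__}: {error}"
        )
    except Exception as error:
        # Generic fallback so one bad region cannot break the whole scan.
        logger.error(
            f"{regional_client.region} -- {error.__class__.__name__}: {error}"
        )
```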

### Extending an Existing Service with New Attributes

When adding a new check that requires data not yet collected by an existing service, you need to extend the service by adding new attributes to its resource models and updating the data collection methods. This is a common contributor task that follows a consistent pattern:

1. **Identify the missing data**: Determine which AWS API call provides the data you need and whether it's already being called by the service.

2. **Add new attributes to the resource model**: Extend the Pydantic `BaseModel` class for the resource with the new fields. Use `Optional` types with `None` as the default value to maintain backward compatibility with existing checks.

3. **Update the data collection method**: Modify the existing method that fetches resource details to also extract and store the new attributes. If no existing method fetches the data, add a new method and call it in the constructor using `self.__threading_call__` if possible (see the wiring sketch after the example below).

4. **Use safe dictionary access**: When extracting values from API responses, always use `.get()` with appropriate defaults to prevent `KeyError` exceptions when the API doesn't return certain fields.

#### Example: Adding DKIM Status to SES Identities

```python
from json import loads
from typing import Optional

from pydantic import BaseModel

from prowler.lib.logger import logger


# Step 1 & 2: Add new fields to the resource model
class Identity(BaseModel):
    name: str
    arn: str
    region: str
    type: Optional[str]
    policy: Optional[dict] = None
    tags: Optional[list] = []
    # New attributes for DKIM check
    dkim_status: Optional[str] = None
    dkim_signing_attributes_origin: Optional[str] = None


# Step 3: Update the data collection method
def _get_email_identities(self, identity):
    try:
        regional_client = self.regional_clients[identity.region]
        identity_attributes = regional_client.get_email_identity(
            EmailIdentity=identity.name
        )
        # Step 4: Use .get() for safe dictionary access
        for content_key, content_value in identity_attributes.get("Policies", {}).items():
            identity.policy = loads(content_value)
        identity.tags = identity_attributes.get("Tags", [])
        # Extract new DKIM attributes
        identity.dkim_status = identity_attributes.get("DkimStatus")
        identity.dkim_signing_attributes_origin = (
            identity_attributes.get("DkimSigningAttributesOrigin")
        )
    except Exception as error:
        logger.error(
            f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
```
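
The example above omits the constructor wiring mentioned in step 3. A minimal sketch of it, assuming the usual AWS service-class shape (the `SES` class body and `_list_email_identities` helper are illustrative):

```python
from prowler.providers.aws.lib.service.service import AWSService


class SES(AWSService):
    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        self.email_identities = []
        self._list_email_identities()  # hypothetical initial listing call
        # Fan the per-identity detail call out across threads, using the
        # resource object list as the iterator (one call per identity).
        self.__threading_call__(self._get_email_identities, self.email_identities)
```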

5. **Update the service tests**: Add the new attributes to the test mock data and assertions to verify correct data extraction.

## Specific Patterns in AWS Checks

@@ -215,6 +215,3 @@ It is also important to keep all code examples as short as possible, including the
| e5 | M365 and Azure Entra checks enabled by or dependent on an E5 license (e.g., advanced threat protection, audit, DLP, and eDiscovery) |
| privilege-escalation | Detects IAM policies or permissions that allow identities to elevate their privileges beyond their intended scope, potentially gaining administrator or higher-level access through specific action combinations |
| ec2-imdsv1 | Identifies EC2 instances using Instance Metadata Service version 1 (IMDSv1), which is vulnerable to SSRF attacks and should be replaced with IMDSv2 for enhanced security |
| vercel-hobby-plan | Vercel checks whose audited feature is available on the Hobby plan (and therefore also on Pro and Enterprise plans) |
| vercel-pro-plan | Vercel checks whose audited feature requires a Pro plan or higher, including features also available on Enterprise or via supported paid add-ons for Pro plans |
| vercel-enterprise-plan | Vercel checks whose audited feature requires the Enterprise plan |
@@ -27,28 +27,14 @@ The most common high level steps to create a new check are:

### Naming Format for Checks

If you already know the check name when creating a request or implementing a check, use a descriptive identifier with lowercase letters and underscores only.

Recommended patterns:

- `<service>_<resource>_<best_practice>`
Checks must be named following the format: `service_subservice_resource_action`.

The name components are:

- `service` – The main service or product area being audited (e.g., ec2, entra, iam, bedrock).
- `resource` – The resource, feature, or configuration being evaluated. It can be a single word or a compound phrase joined with underscores (e.g., instance, policy, guardrail, sensitive_information_filter).
- `best_practice` – The expected secure state or best practice being checked (e.g., enabled, encrypted, restricted, configured, not_publicly_accessible).

Additional guidance:

- Use underscores only. Do not use hyphens.
- Keep the name specific enough to describe the behavior of the check.
- The first segment should match the service or product area whenever possible.

Examples:

- `s3_bucket_versioning_enabled`
- `bedrock_guardrail_sensitive_information_filter_enabled`
- `service` – The main service being audited (e.g., ec2, entra, iam, etc.)
- `subservice` – An individual component or subset of functionality within the service that is being audited. This may correspond to a shortened version of the class attribute accessed within the check. If there is no subservice, just omit it.
- `resource` – The specific resource type being evaluated (e.g., instance, policy, role, etc.)
- `action` – The security aspect or configuration being checked (e.g., public, encrypted, enabled, etc.)
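
To make the mapping concrete, here is a minimal sketch of how a check name maps onto a file and class, assuming Prowler's usual check layout (the check logic itself is a placeholder):

```python
# prowler/providers/aws/services/s3/s3_bucket_versioning_enabled/s3_bucket_versioning_enabled.py
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.s3.s3_client import s3_client


class s3_bucket_versioning_enabled(Check):  # class name matches the check name
    def execute(self):
        findings = []
        for bucket in s3_client.buckets.values():
            report = Check_Report_AWS(metadata=self.metadata(), resource=bucket)
            report.status = "PASS" if bucket.versioning else "FAIL"
            report.status_extended = (
                f"S3 Bucket {bucket.name} has versioning "
                f"{'enabled' if bucket.versioning else 'disabled'}."
            )
            findings.append(report)
        return findings
```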

### File Creation

@@ -401,7 +387,7 @@ Provides both code examples and best practice recommendations for addressing the

#### Categories

One or more functional groupings used for execution filtering (e.g., `internet-exposed`). Categories must match the predefined values enforced by `CheckMetadata`; adding a new category requires updating the validator and the metadata documentation.
One or more functional groupings used for execution filtering (e.g., `internet-exposed`). You can define new categories just by adding to this field.

For the complete list of available categories, see [Categories Guidelines](/developer-guide/check-metadata-guidelines#categories-guidelines).
@@ -134,22 +134,6 @@ prek installed at `.git/hooks/pre-commit`
If pre-commit hooks were previously installed, run `prek install --overwrite` to replace the existing hook. Otherwise, both tools will run on each commit.
</Warning>

#### Enable TruffleHog as a Pre-Push Hook

By default, only `pre-commit` hooks are installed. To enable [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) secret scanning on every push, install the `pre-push` hook type explicitly:

```shell
prek install --hook-type pre-push
```

Successful installation should produce the following output:

```shell
prek installed at `.git/hooks/pre-push`
```

Once installed, TruffleHog runs before each push and blocks the operation when verified secrets are detected.

### Code Quality and Security Checks

Before merging pull requests, several automated checks and utilities ensure code security and updated dependencies:
@@ -1003,7 +1003,7 @@ class ProwlerArgumentParser:
            formatter_class=RawTextHelpFormatter,
            usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,dashboard,iac,your_provider} ...",
            epilog="""
Available Providers:
Available Cloud Providers:
  {aws,azure,gcp,kubernetes,m365,github,iac,nhn,your_provider}
    aws                 AWS Provider
    azure               Azure Provider
@@ -1,131 +0,0 @@
---
title: 'Prowler Studio'
---

**Prowler Studio is an AI workflow that ensures Claude Code follows Prowler's skills, guardrails, and best practices when creating new security checks.** What lands in the resulting pull request is consistent, tested, and ready for human review — not half-correct boilerplate that needs to be rewritten.

<Info>
**Contributor Tool**: Prowler Studio is a workflow for advanced contributors adding new Prowler security checks. It is not part of Prowler Cloud, Prowler App, or Prowler CLI.
</Info>

<Warning>
**Preview Feature**: Prowler Studio is under active development and breaking changes are expected. Please report issues or share feedback on [GitHub](https://github.com/prowler-cloud/prowler-studio/issues) or in the [Slack community](https://goto.prowler.com/slack).
</Warning>

<Card title="Prowler Studio Repository" icon="github" href="https://github.com/prowler-cloud/prowler-studio" horizontal>
Clone the source code, install Prowler Studio, and explore the agent workflow in detail.
</Card>

## The Problem

Adding a new check to [Prowler](https://github.com/prowler-cloud/prowler) is more than writing detection logic. A correct check has to:

- Match Prowler's exact service and check folder structure and naming conventions
- Wire up metadata, severity, remediation, tests, and compliance mappings
- Mirror the patterns used by the hundreds of existing checks in the same provider
- Actually load when Prowler scans for available checks — silent structural mistakes are easy to make

Asking a general-purpose AI assistant to do this usually means guessing. It misses conventions, skips tests, or invents structure that looks right but does not load. The result is a half-correct PR that needs to be reviewed line by line or rewritten.

## The Solution

Prowler Studio enforces the workflow end-to-end. Describe the check once — a markdown ticket, a Jira issue, or a GitHub issue — and the workflow:

1. **Loads Prowler-specific skills into every agent.** Every step starts with the same context an experienced Prowler engineer would have in mind. See [AI Skills System](/developer-guide/ai-skills) for how skills are structured.
2. **Runs specialized agents in sequence.** Implementation → testing → compliance mapping → review → PR creation. Each agent has one job and a tight scope.
3. **Verifies as it goes.** The check must load in Prowler. Tests must pass. If something fails, the agent fixes it and re-runs (up to a bounded number of attempts) before moving on.
4. **Produces a complete pull request.** Branch, passing check, tests, compliance mappings, and a pull request waiting for human review.

The result is a consistent starting point, every time, on every supported provider.

## Quick Start

### Install
Prowler Studio requires [`uv`](https://docs.astral.sh/uv/getting-started/installation/); follow the linked official installation guide if you do not have it installed.

```bash
git clone https://github.com/prowler-cloud/prowler-studio
cd prowler-studio
uv sync
source .venv/bin/activate
```

### Describe the Check

A ticket is a structured markdown description of the check to create. It is the only input the workflow needs; every agent (implementation, testing, compliance mapping, review, PR creation) uses it as the source of truth, so the more concrete it is, the closer the first PR will land to the desired outcome.

The ticket can be supplied in three ways:

- **Local markdown file** → `--ticket path/to/ticket.md`
- **Jira issue** → `--jira-url https://...` (uses the issue body)
- **GitHub issue** → `--github-url https://...` (uses the issue body)

The content should follow the **New Check Request** template:

- The local copy at [`check_ticket_template.md`](https://github.com/prowler-cloud/prowler-studio/blob/main/check_ticket_template.md) covers `--ticket` and Jira tickets.
- A prefilled GitHub form is also available: [Create a New Check Request issue](https://github.com/prowler-cloud/prowler/issues/new?template=new-check-request.yml).

Sections marked *Optional* can be skipped; everything else helps the agents make the right decisions.

### Run the Workflow

From a local markdown ticket:

```bash
prowler-studio --ticket check_ticket.md
```

From a Jira ticket:

```bash
prowler-studio --jira-url https://mycompany.atlassian.net/browse/PROJ-123
```

From a GitHub issue:

```bash
prowler-studio --github-url https://github.com/owner/repo/issues/123
```

<Note>
Provide exactly one of `--ticket`, `--jira-url`, or `--github-url`.
</Note>

Keep changes local (no push, no pull request):

```bash
prowler-studio -b feat/my-check --ticket check_ticket.md --local
```

### What You Get

After a successful run the working environment contains:

- A new branch on a clean Prowler worktree containing the check, metadata, tests, and compliance mappings
- A pull request opened against Prowler (skipped with `--local`)
- A timestamped log file under `logs/` capturing every step the agents took

## CLI Options

| Option | Short | Description |
|--------|-------|-------------|
| `--branch` | `-b` | Branch name (default: `feat/<ticket>-<check_name>` or `feat/<check_name>`) |
| `--ticket` | `-t` | Path to a markdown check ticket file |
| `--jira-url` | `-j` | Jira ticket URL (e.g., `https://mycompany.atlassian.net/browse/PROJ-123`) |
| `--github-url` | `-g` | GitHub issue URL (e.g., `https://github.com/owner/repo/issues/123`) |
| `--working-dir` | `-w` | Working directory for the Prowler clone (default: `./working`) |
| `--no-worktree` | | Legacy mode — work directly on the main clone instead of using worktrees |
| `--cleanup-worktree` | | Remove the worktree after a successful pull request is created |
| `--local` | | Keep changes local — skip push and pull request creation |

## Configuration

Set these environment variables depending on the input source:

| Variable | When Needed | Purpose |
|----------|-------------|---------|
| `GITHUB_TOKEN` | `--github-url` (recommended) | Higher GitHub API rate limits and access to private issues |
| `JIRA_SITE_URL` | `--jira-url` | Jira site, e.g. `https://mycompany.atlassian.net` |
| `JIRA_EMAIL` | `--jira-url` | Email of the Jira account used to fetch the ticket |
| `JIRA_API_TOKEN` | `--jira-url` | API token for the Jira account |
@@ -365,8 +365,7 @@
                "developer-guide/security-compliance-framework",
                "developer-guide/lighthouse-architecture",
                "developer-guide/mcp-server",
                "developer-guide/ai-skills",
                "developer-guide/prowler-studio"
                "developer-guide/ai-skills"
            ]
        },
        {
@@ -32,11 +32,11 @@ Access Prowler App by logging in with **email and password**.

<img src="/images/log-in.png" alt="Log In" width="285" />

## Add Provider
## Add Cloud Provider

Configure a provider for scanning:
Configure a cloud provider for scanning:

1. Navigate to `Settings > Providers` and click `Add Provider`.
1. Navigate to `Settings > Cloud Providers` and click `Add Account`.
2. Select the cloud provider.
3. Enter the provider's identifier (Optional: Add an alias):
   - **AWS**: Account ID

@@ -121,8 +121,8 @@ To update the environment file:
Edit the `.env` file and change version values:

```env
PROWLER_UI_VERSION="5.25.2"
PROWLER_API_VERSION="5.25.2"
PROWLER_UI_VERSION="5.25.1"
PROWLER_API_VERSION="5.25.1"
```

<Note>

@@ -159,40 +159,6 @@ When these environment variables are set, the API will use them directly instead
A fix addressing this permission issue is being evaluated in [PR #9953](https://github.com/prowler-cloud/prowler/pull/9953).
</Note>

### Scan Stuck in Executing State After Worker Crash

When running Prowler App via Docker Compose, a scan may remain indefinitely in the `executing` state if the worker process crashes (for example, due to an Out of Memory condition) before it can update the scan status. Since it is not currently possible to cancel a scan in `executing` state through the UI, the workaround is to manually update the scan record in the database.

**Root Cause:**

The Celery worker process terminates unexpectedly (OOM, node failure, etc.) before transitioning the scan state to `completed` or `failed`. The scan record remains in `executing` with no active process to advance it.

**Solution:**

Connect to the database using the `prowler_admin` user. Due to Row-Level Security (RLS), the default database user cannot see scan records — you must use `prowler_admin`:

```bash
psql -U prowler_admin -d prowler_db
```

Identify the stuck scan by filtering for scans in `executing` state:

```sql
SELECT id, name, state, started_at FROM scans WHERE state = 'executing';
```

Update the scan state to `failed` using the scan ID:

```sql
UPDATE scans SET state = 'failed' WHERE id = '<scan-id>';
```

After this change, the scan will appear as failed in the UI and you can launch a new scan.

<Note>
A feature to cancel executing scans directly from the UI is being tracked in [GitHub Issue #6893](https://github.com/prowler-cloud/prowler/issues/6893).
</Note>

### SAML/OAuth ACS URL Incorrect When Running Behind a Proxy or Load Balancer

See [GitHub Issue #9724](https://github.com/prowler-cloud/prowler/issues/9724) for more details.
@@ -0,0 +1,47 @@
---
title: 'Prowler Check Kreator'
---

<Note>
Currently, this tool is only available for creating checks for the AWS provider.
</Note>

<Note>
If you are looking for a way to create new checks for all supported providers, use [Prowler Studio](https://github.com/prowler-cloud/prowler-studio) instead: an AI-powered toolkit for generating and managing Prowler security checks that supersedes the Check Kreator.
</Note>

## Introduction

**Prowler Check Kreator** is a utility designed to streamline the creation of new checks for Prowler. This tool generates all the files required to add a new check to the Prowler repository. Specifically, it creates:

- A dedicated folder for the check.
- The main check script.
- A metadata file with essential details.
- A folder and file structure for testing the check.

## Usage

To use the tool, execute the main script with the following command:

```bash
python util/prowler_check_kreator/prowler_check_kreator.py <prowler_provider> <check_name>
```

Parameters:

- `<prowler_provider>`: Currently only AWS is supported.
- `<check_name>`: The name to assign to the new check.

## AI integration

This tool can optionally use AI to help generate the check code and metadata file content. When AI assistance is chosen, the tool uses [Gemini](https://gemini.google.com/) to produce preliminary code and metadata.

<Note>
For this feature to work, the `google-generativeai` library must be installed in your Python environment.
</Note>

<Warning>
AI-generated code and metadata might contain errors or require adjustments to align with specific Prowler requirements. Carefully review all AI-generated content before committing.
</Warning>

To enable AI assistance, confirm when prompted by the tool. Additionally, ensure that the `GEMINI_API_KEY` environment variable is set with a valid Gemini API key. For instructions on obtaining an API key, refer to the [Gemini documentation](https://ai.google.dev/gemini-api/docs/api-key).
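As a quick sanity check before running the tool with AI assistance, a minimal sketch like the following verifies both prerequisites; this snippet is illustrative and not part of the Check Kreator itself:

```python
import os

import google.generativeai as genai

# Prerequisites named above: the google-generativeai library
# and a GEMINI_API_KEY environment variable.
api_key = os.environ.get("GEMINI_API_KEY")
if not api_key:
    raise SystemExit("Set GEMINI_API_KEY before enabling AI assistance")

# configure() only registers the key locally; it does not call the API.
genai.configure(api_key=api_key)
print("google-generativeai is installed and the API key is configured")
```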
@@ -40,13 +40,13 @@ Before you begin, make sure you have:

### Step 2: Access Prowler Cloud

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Go to "Configuration" > "Providers"
2. Go to "Configuration" > "Cloud Providers"

![Providers page](/images/alibabacloud/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/alibabacloud/prowler-cloud-providers-page.png)

3. Click "Add Provider"
3. Click "Add Cloud Provider"

![Add a Cloud Provider](/images/alibabacloud/prowler-cloud-add-provider.png)

4. Select "Alibaba Cloud"

@@ -19,13 +19,13 @@ title: 'Getting Started With AWS on Prowler'

### Step 2: Access Prowler Cloud

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Go to "Configuration" > "Providers"
2. Go to "Configuration" > "Cloud Providers"

![Providers page](/images/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/prowler-cloud-providers-page.png)

3. Click "Add Provider"
3. Click "Add Cloud Provider"

![Add a Cloud Provider](/images/prowler-cloud-add-provider.png)

4. Select "Amazon Web Services"

@@ -35,13 +35,13 @@ For detailed instructions on how to create the Service Principal and configure p

### Step 2: Access Prowler Cloud

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Navigate to `Configuration` > `Providers`
2. Navigate to `Configuration` > `Cloud Providers`

![Providers page](/images/azure/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/azure/prowler-cloud-providers-page.png)

3. Click on `Add Provider`
3. Click on `Add Cloud Provider`

![Add a Cloud Provider](/images/azure/prowler-cloud-add-provider.png)

4. Select `Microsoft Azure`

@@ -42,13 +42,13 @@ The Account ID is a 32-character hexadecimal string (e.g., `372e67954025e0ba6aaa

### Step 2: Open Prowler Cloud

1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app).
2. Navigate to "Configuration" > "Providers".
2. Navigate to "Configuration" > "Cloud Providers".

![Providers page](/images/cloudflare/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/cloudflare/prowler-cloud-providers-page.png)

3. Click "Add Provider".
3. Click "Add Cloud Provider".

![Add a Cloud Provider](/images/cloudflare/prowler-cloud-add-provider.png)

4. Select "Cloudflare".

@@ -14,13 +14,13 @@ title: 'Getting Started With GCP on Prowler'

### Step 2: Access Prowler Cloud

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Go to "Configuration" > "Providers"
2. Go to "Configuration" > "Cloud Providers"

![Providers page](/images/gcp/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/gcp/prowler-cloud-providers-page.png)

3. Click "Add Provider"
3. Click "Add Cloud Provider"

![Add a Cloud Provider](/images/gcp/prowler-cloud-add-provider.png)

4. Select "Google Cloud Platform"

@@ -275,7 +275,7 @@ For step-by-step setup instructions for Prowler Cloud, see the [Getting Started

### Using Personal Access Token

1. In Prowler Cloud, navigate to **Configuration** > **Providers** > **Add Provider** > **GitHub**.
1. In Prowler Cloud, navigate to **Configuration** > **Cloud Providers** > **Add Cloud Provider** > **GitHub**.

2. Enter your GitHub Account ID (username or organization name).

@@ -49,13 +49,13 @@ Before adding GitHub to Prowler Cloud/App, ensure you have:

### Step 1: Access Prowler Cloud/App

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Go to **Configuration** → **Providers**
2. Go to **Configuration** → **Cloud Providers**

![Providers page](/images/github/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/github/prowler-cloud-providers-page.png)

3. Click **Add Provider**
3. Click **Add Cloud Provider**

![Add a Cloud Provider](/images/github/prowler-cloud-add-provider.png)

4. Select **GitHub**

@@ -43,13 +43,13 @@ The Customer ID starts with the letter "C" followed by alphanumeric characters (

### Step 2: Open Prowler Cloud

1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app).
2. Navigate to "Configuration" > "Providers".
2. Navigate to "Configuration" > "Cloud Providers".

![Providers page](/images/googleworkspace/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/googleworkspace/prowler-cloud-providers-page.png)

3. Click "Add Provider".
3. Click "Add Cloud Provider".

![Add a Cloud Provider](/images/googleworkspace/prowler-cloud-add-provider.png)

4. Select "Google Workspace".

@@ -42,13 +42,13 @@ Scanner selection is not configurable in Prowler App. Default scanners, misconfi

### Step 1: Access Prowler Cloud/App

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Go to "Configuration" > "Providers"
2. Go to "Configuration" > "Cloud Providers"

![Providers page](/images/iac/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/iac/prowler-cloud-providers-page.png)

3. Click "Add Provider"
3. Click "Add Cloud Provider"

![Add a Cloud Provider](/images/iac/prowler-cloud-add-provider.png)

4. Select "Infrastructure as Code"

@@ -34,13 +34,13 @@ Prowler Cloud does not support scanner selection. The vulnerability, secret, and

### Step 1: Access Prowler Cloud

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Navigate to "Configuration" > "Providers"
2. Navigate to "Configuration" > "Cloud Providers"

![Providers page](/images/container-registry/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/container-registry/prowler-cloud-providers-page.png)

3. Click "Add Provider"
3. Click "Add Cloud Provider"

![Add a Cloud Provider](/images/container-registry/prowler-cloud-add-provider.png)

4. Select "Container Registry"

@@ -7,13 +7,13 @@ title: 'Getting Started with Kubernetes'

### Step 1: Access Prowler Cloud/App

1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Go to "Configuration" > "Providers"
2. Go to "Configuration" > "Cloud Providers"

![Providers page](/images/kubernetes/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/kubernetes/prowler-cloud-providers-page.png)

3. Click "Add Provider"
3. Click "Add Cloud Provider"

![Add a Cloud Provider](/images/kubernetes/prowler-cloud-add-provider.png)

4. Select "Kubernetes"

@@ -42,13 +42,13 @@ Set up authentication for Microsoft 365 with the [Microsoft 365 Authentication](

### Step 2: Open Prowler Cloud

1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app).
2. Navigate to "Configuration" > "Providers".
2. Navigate to "Configuration" > "Cloud Providers".

![Providers page](/images/m365/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/m365/prowler-cloud-providers-page.png)

3. Click "Add Provider".
3. Click "Add Cloud Provider".

![Add a Cloud Provider](/images/m365/prowler-cloud-add-provider.png)

4. Select "Microsoft 365".

@@ -38,7 +38,7 @@ If **Require IP Access List for the Atlas Administration API** is enabled in you

### Step 1: Add the provider

1. Navigate to **Providers** and click **Add Provider**.
1. Navigate to **Cloud Providers** and click **Add Cloud Provider**.
![Add provider](/images/mongodbatlas/add-provider.png)
2. Select **MongoDB Atlas** from the provider list.
3. Enter your **Organization ID** (24 hex characters). This value is visible in the Atlas UI under **Organization Settings**.

@@ -16,8 +16,8 @@ The following steps apply to Prowler Cloud and the self-hosted Prowler App.

### Step 2: Access Prowler Cloud
1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app).
2. Go to **Configuration** → **Providers** and click **Add Provider**.
![Prowler Cloud providers page](/images/oracle/prowler-cloud-providers-page.png)
2. Go to **Configuration** → **Cloud Providers** and click **Add Cloud Provider**.
![Prowler Cloud providers page](/images/oracle/prowler-cloud-providers-page.png)
3. Select **Oracle Cloud** and enter the **Tenancy OCID** and an optional alias, then choose **Next**.
![Oracle Cloud provider selection](/images/oracle/provider-selection.png)

@@ -34,7 +34,7 @@ Before running Prowler with the OpenStack provider, ensure you have:

### Step 1: Add the Provider

1. Navigate to "Providers" and click "Add Provider".
1. Navigate to "Cloud Providers" and click "Add Cloud Provider".
![Add provider](/images/openstack/add-provider.png)
2. Select "OpenStack" from the provider list.
3. Enter the "Project ID" from the OpenStack provider.

@@ -29,13 +29,13 @@ Set up authentication for Vercel with the [Vercel Authentication](/user-guide/pr

### Step 1: Add the Provider

1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app).
2. Navigate to "Configuration" > "Providers".
2. Navigate to "Configuration" > "Cloud Providers".

![Providers page](/images/vercel/prowler-cloud-providers-page.png)
![Cloud Providers page](/images/vercel/prowler-cloud-providers-page.png)

3. Click "Add Provider".
3. Click "Add Cloud Provider".

![Add a Cloud Provider](/images/vercel/prowler-cloud-add-provider.png)

4. Select "Vercel".

@@ -160,25 +160,3 @@ Prowler for Vercel includes security checks across the following services:
| **Project** | Deployment protection, environment variable security, fork protection, and skew protection |
| **Security** | Web Application Firewall (WAF), rate limiting, IP blocking, and managed rulesets |
| **Team** | SSO enforcement, directory sync, member access, and invitation hygiene |

## Checks With Explicit Plan-Based Behavior

Prowler currently includes 26 Vercel checks. The 11 checks below have explicit billing-plan handling in the provider metadata or check logic. When the scanned scope reports a billing plan, Prowler adds plan-aware context to findings for these checks. If the API does not expose the required configuration, Prowler may return `MANUAL` and require verification in the Vercel dashboard.

| Check ID | Hobby | Pro | Enterprise | Notes |
|----------|-------|-----|------------|-------|
| `project_password_protection_enabled` | Not available | Available as a paid add-on | Available | Checks password protection for deployments |
| `project_production_deployment_protection_enabled` | Not available | Available with supported paid deployment protection options | Available | Checks protection for production deployments |
| `project_skew_protection_enabled` | Not available | Available | Available | Checks skew protection during rollouts |
| `security_custom_rules_configured` | Not available | Available | Available | Returns `MANUAL` when the firewall configuration cannot be assessed from the API |
| `security_ip_blocking_rules_configured` | Not available | Available | Available | Returns `MANUAL` when the firewall configuration cannot be assessed from the API |
| `team_saml_sso_enabled` | Not available | Available | Available | Checks team SAML SSO configuration |
| `team_saml_sso_enforced` | Not available | Available | Available | Checks SAML SSO enforcement for all team members |
| `team_directory_sync_enabled` | Not available | Not available | Available | Checks SCIM directory sync |
| `security_managed_rulesets_enabled` | Bot Protection and AI Bots managed rulesets | Bot Protection and AI Bots managed rulesets | All managed rulesets, including OWASP Core Ruleset | Returns `MANUAL` when the firewall configuration cannot be assessed from the API |
| `security_rate_limiting_configured` | Not available | Available | Available | Returns `MANUAL` when the firewall configuration cannot be assessed from the API |
| `security_waf_enabled` | Not available | Available | Available | Returns `MANUAL` when the firewall configuration cannot be assessed from the API |

<Note>
The five firewall-related checks (`security_waf_enabled`, `security_custom_rules_configured`, `security_ip_blocking_rules_configured`, `security_rate_limiting_configured`, and `security_managed_rulesets_enabled`) return `MANUAL` when the firewall configuration endpoint is not accessible from the API. The other 15 current Vercel checks do not currently include plan-specific handling in provider logic, but every Vercel check includes exactly one billing-plan metadata category (`vercel-hobby-plan`, `vercel-pro-plan`, or `vercel-enterprise-plan`) alongside its functional security category.
</Note>

@@ -123,7 +123,7 @@ The Roles section in Prowler is designed to facilitate the assignment of custom
</Note>
### Provider Groups

Provider Groups control visibility across specific providers. When creating a new role, you can assign specific groups to define their Provider visibility. This ensures that users with that role have access only to the Providers that are required.
Provider Groups control visibility across specific providers. When creating a new role, you can assign specific groups to define their Cloud Provider visibility. This ensures that users with that role have access only to the Cloud Providers that are required.

By default, a new user role does not have visibility into any group.

@@ -223,7 +223,7 @@ Assign administrative permissions by selecting from the following options:
| Invite and Manage Users | All | Invite new users and manage existing ones. |
| Manage Account | All | Adjust account settings, delete users and read/manage users permissions. |
| Manage Scans | All | Run and review scans. |
| Manage Providers | All | Add or modify connected providers. |
| Manage Cloud Providers | All | Add or modify connected cloud providers. |
| Manage Integrations | All | Add or modify the Prowler Integrations. |
| Manage Ingestions | Prowler Cloud | Allow or deny the ability to submit findings ingestion batches via the API. |
| Manage Billing | Prowler Cloud | Access and manage billing settings and subscription information. |

@@ -320,7 +320,7 @@ Once the required permissions are set up, proceed to configure the S3 integratio
![S3 Integration Configuration Form](/images/s3-integration-form.png)
4. Complete the configuration form with the following details:

- **Providers:** Select the providers whose scan results should be exported to this S3 bucket
- **Cloud Providers:** Select the providers whose scan results should be exported to this S3 bucket
- **Bucket Name:** Enter the name of the target S3 bucket (e.g., `my-security-findings-bucket`)
- **Output Directory:** Specify the directory path within the bucket (e.g., `/prowler-findings/`, defaults to `output`)

@@ -72,8 +72,8 @@ To perform security scans, link a cloud provider account. Prowler supports the f

Steps to add a provider:

1. Navigate to `Settings > Providers`.
2. Click `Add Provider` to set up a new provider and provide your credentials.
1. Navigate to `Settings > Cloud Providers`.
2. Click `Add Account` to set up a new provider and provide your credentials.

<img src="/images/add-provider.png" alt="Add Provider" width="700" />

@@ -0,0 +1,51 @@
---
title: 'Prowler Check Kreator'
---

<Note>
Currently, this tool is only available for creating checks for the AWS provider.
</Note>

<Note>
If you are looking for a way to create new checks for all supported providers, use [Prowler Studio](https://github.com/prowler-cloud/prowler-studio) instead: an AI-powered toolkit for generating and managing Prowler security checks that supersedes the Check Kreator.
</Note>

## Introduction

**Prowler Check Kreator** is a utility designed to streamline the creation of new checks for Prowler. This tool generates all the files required to add a new check to the Prowler repository. Specifically, it creates:

- A dedicated folder for the check.
- The main check script.
- A metadata file with essential details.
- A folder and file structure for testing the check.

## Usage

To use the tool, execute the main script with the following command:

```bash
python util/prowler_check_kreator/prowler_check_kreator.py <prowler_provider> <check_name>
```

Parameters:

- `<prowler_provider>`: Currently only AWS is supported.
- `<check_name>`: The name to assign to the new check.

## AI integration

This tool can optionally use AI to help generate the check code and metadata file content. When AI assistance is chosen, the tool uses [Gemini](https://gemini.google.com/) to produce preliminary code and metadata.

<Note>
For this feature to work, the `google-generativeai` library must be installed in your Python environment.
</Note>

<Warning>
AI-generated code and metadata might contain errors or require adjustments to align with specific Prowler requirements. Carefully review all AI-generated content before committing.
</Warning>

To enable AI assistance, confirm when prompted by the tool. Additionally, ensure that the `GEMINI_API_KEY` environment variable is set with a valid Gemini API key. For instructions on obtaining an API key, refer to the [Gemini documentation](https://ai.google.dev/gemini-api/docs/api-key).

@@ -246,10 +246,10 @@ Now that both roles are deployed — the management account role (Step 1) and th

### Open the Wizard

1. Navigate to **Providers** and click **Add Provider**.
1. Navigate to **Cloud Providers** and click **Add Cloud Provider**.

<Frame>
  <img src="/images/organizations/cloud-providers-add.png" alt="Providers page showing the Add Provider button" />
  <img src="/images/organizations/cloud-providers-add.png" alt="Cloud Providers page showing the Add Cloud Provider button" />
</Frame>

2. Select **Amazon Web Services** as the provider.

@@ -2,22 +2,6 @@

All notable changes to the **Prowler MCP Server** are documented in this file.

## [0.7.0] (Prowler UNRELEASED)

### 🔄 Changed

- `prowler_app_get_resource_events` now requests the API's `text/plain` representation and returns a markdown report [(#XXXXX)](https://github.com/prowler-cloud/prowler/pull/XXXXX)

### 🗑️ Removed

- `ResourceEvent`/`ResourceEventsResponse` models are removed, since the tool no longer parses JSON:API for the API's `resources/{id}/events` endpoint [(#XXXXX)](https://github.com/prowler-cloud/prowler/pull/XXXXX)

### 🔐 Security

- Bump `cryptography` from 46.0.1 to 47.0.0 (transitive) for CVE-2026-39892 and CVE-2026-26007 / CVE-2026-34073 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)

---

## [0.6.0] (Prowler v5.23.0)

### 🚀 Added

@@ -135,3 +135,48 @@ class ResourcesMetadataResponse(BaseModel):
            regions=attributes.get("regions"),
            types=attributes.get("types"),
        )


class ResourceEvent(MinimalSerializerMixin, BaseModel):
    """A cloud API action performed on a resource.

    Sourced from cloud provider audit logs (AWS CloudTrail, Azure Activity Logs,
    GCP Audit Logs, etc.).
    """

    id: str
    event_time: str
    event_name: str
    event_source: str
    actor: str
    actor_uid: str | None = None
    actor_type: str | None = None
    source_ip_address: str | None = None
    user_agent: str | None = None
    request_data: dict | None = None
    response_data: dict | None = None
    error_code: str | None = None
    error_message: str | None = None

    @classmethod
    def from_api_response(cls, data: dict) -> "ResourceEvent":
        """Transform JSON:API resource event response."""
        return cls(id=data["id"], **data.get("attributes", {}))


class ResourceEventsResponse(BaseModel):
    """Response wrapper for resource events list."""

    events: list[ResourceEvent]
    total_events: int

    @classmethod
    def from_api_response(cls, response: dict) -> "ResourceEventsResponse":
        """Transform JSON:API response to events list."""
        data = response.get("data", [])
        events = [ResourceEvent.from_api_response(item) for item in data]

        return cls(
            events=events,
            total_events=len(events),
        )

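For reference, here is a minimal sketch of how these restored models consume a JSON:API payload. The payload below is illustrative, inferred from the `from_api_response` methods above, and assumes the models are importable from `prowler_mcp_server.prowler_app.models.resources`:

```python
from prowler_mcp_server.prowler_app.models.resources import ResourceEventsResponse

# Illustrative JSON:API-style payload, shaped after what from_api_response expects.
sample_response = {
    "data": [
        {
            "id": "event-1",
            "attributes": {
                "event_time": "2026-01-01T00:00:00Z",
                "event_name": "PutBucketPolicy",
                "event_source": "s3.amazonaws.com",
                "actor": "alice",
            },
        }
    ]
}

events = ResourceEventsResponse.from_api_response(sample_response)
assert events.total_events == 1
assert events.events[0].event_name == "PutBucketPolicy"
```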
@@ -8,6 +8,7 @@ from typing import Any

from prowler_mcp_server.prowler_app.models.resources import (
    DetailedResource,
    ResourceEventsResponse,
    ResourcesListResponse,
    ResourcesMetadataResponse,
)
@@ -370,13 +371,12 @@ class ResourcesTools(BaseTool):
        IMPORTANT: Currently only available for AWS resources. Uses CloudTrail to retrieve
        the modification history of a resource, showing who did what and when.

        Returns a markdown report (via the API's `Accept: text/plain` representation)
        with one section per event, each containing:
        - What happened: event_name (e.g., PutBucketPolicy), event source
        Each event includes:
        - What happened: event_name (e.g., PutBucketPolicy), event_source (e.g., s3.amazonaws.com)
        - Who did it: actor, actor_type, actor_uid
        - From where: source_ip_address, user_agent
        - What changed: request_data, response_data (the API call payloads)
        - Errors: error code and message when the action failed
        - What changed: request_data, response_data (full API payloads)
        - Errors: error_code, error_message (if the action failed)

        Use cases:
        - Investigating security incidents (who modified this resource?)
@@ -396,14 +396,9 @@ class ResourcesTools(BaseTool):

        clean_params = self.api_client.build_filter_params(params)

        token = await self.api_client.auth_manager.get_valid_token()
        headers = self.api_client.auth_manager.get_headers(token)
        headers["Accept"] = "text/plain"
        response = await self.api_client.client.get(
            f"{self.api_client.auth_manager.base_url}/resources/{resource_id}/events",
            headers=headers,
            params=clean_params,
        api_response = await self.api_client.get(
            f"/resources/{resource_id}/events", params=clean_params
        )
        response.raise_for_status()
        events_response = ResourceEventsResponse.from_api_response(api_response)

        return {"report": response.text}
        return events_response.model_dump()

@@ -11,7 +11,7 @@ description = "MCP server for Prowler ecosystem"
name = "prowler-mcp"
readme = "README.md"
requires-python = ">=3.12"
version = "0.7.0"
version = "0.5.0"

[project.scripts]
prowler-mcp = "prowler_mcp_server.main:main"

Generated +47 -44
@@ -204,55 +204,58 @@
[[package]]
name = "cryptography"
version = "47.0.0"
version = "46.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ef/b2/7ffa7fe8207a8c42147ffe70c3e360b228160c1d85dc3faff16aaa3244c0/cryptography-47.0.0.tar.gz", hash = "sha256:9f8e55fe4e63613a5e1cc5819030f27b97742d720203a087802ce4ce9ceb52bb", size = 830863, upload-time = "2026-04-24T19:54:57.056Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a9/62/e3664e6ffd7743e1694b244dde70b43a394f6f7fbcacf7014a8ff5197c73/cryptography-46.0.1.tar.gz", hash = "sha256:ed570874e88f213437f5cf758f9ef26cbfc3f336d889b1e592ee11283bb8d1c7", size = 749198, upload-time = "2025-09-17T00:10:35.797Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a4/98/40dfe932134bdcae4f6ab5927c87488754bf9eb79297d7e0070b78dd58e9/cryptography-47.0.0-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:160ad728f128972d362e714054f6ba0067cab7fb350c5202a9ae8ae4ce3ef1a0", size = 7912214, upload-time = "2026-04-24T19:53:03.864Z" },
{ url = "https://files.pythonhosted.org/packages/34/c6/2733531243fba725f58611b918056b277692f1033373dcc8bd01af1c05d4/cryptography-47.0.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b9a8943e359b7615db1a3ba587994618e094ff3d6fa5a390c73d079ce18b3973", size = 4644617, upload-time = "2026-04-24T19:53:06.909Z" },
{ url = "https://files.pythonhosted.org/packages/00/e3/b27be1a670a9b87f855d211cf0e1174a5d721216b7616bd52d8581d912ed/cryptography-47.0.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f5c15764f261394b22aef6b00252f5195f46f2ca300bec57149474e2538b31f8", size = 4668186, upload-time = "2026-04-24T19:53:09.053Z" },
{ url = "https://files.pythonhosted.org/packages/81/b9/8443cfe5d17d482d348cee7048acf502bb89a51b6382f06240fd290d4ca3/cryptography-47.0.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9c59ab0e0fa3a180a5a9c59f3a5abe3ef90d474bc56d7fadfbe80359491b615b", size = 4651244, upload-time = "2026-04-24T19:53:11.217Z" },
{ url = "https://files.pythonhosted.org/packages/5d/5e/13ed0cdd0eb88ba159d6dd5ebfece8cb901dbcf1ae5ac4072e28b55d3153/cryptography-47.0.0-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:34b4358b925a5ea3e14384ca781a2c0ef7ac219b57bb9eacc4457078e2b19f92", size = 5252906, upload-time = "2026-04-24T19:53:13.532Z" },
{ url = "https://files.pythonhosted.org/packages/64/16/ed058e1df0f33d440217cd120d41d5dda9dd215a80b8187f68483185af82/cryptography-47.0.0-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0024b87d47ae2399165a6bfb20d24888881eeab83ae2566d62467c5ff0030ce7", size = 4701842, upload-time = "2026-04-24T19:53:15.618Z" },
{ url = "https://files.pythonhosted.org/packages/02/e0/3d30986b30fdbd9e969abbdf8ba00ed0618615144341faeb57f395a084fe/cryptography-47.0.0-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:1e47422b5557bb82d3fff997e8d92cff4e28b9789576984f08c248d2b3535d93", size = 4289313, upload-time = "2026-04-24T19:53:17.755Z" },
{ url = "https://files.pythonhosted.org/packages/df/fd/32db38e3ad0cb331f0691cb4c7a8a6f176f679124dee746b3af6633db4d9/cryptography-47.0.0-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:6f29f36582e6151d9686235e586dd35bb67491f024767d10b842e520dc6a07ac", size = 4650964, upload-time = "2026-04-24T19:53:20.062Z" },
{ url = "https://files.pythonhosted.org/packages/86/53/5395d944dfd48cb1f67917f533c609c34347185ef15eb4308024c876f274/cryptography-47.0.0-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:a9b761f012a943b7de0e828843c5688d0de94a0578d44d6c85a1bae32f87791f", size = 5207817, upload-time = "2026-04-24T19:53:22.498Z" },
{ url = "https://files.pythonhosted.org/packages/34/4f/e5711b28e1901f7d480a2b1b688b645aa4c77c73f10731ed17e7f7db3f0d/cryptography-47.0.0-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4e1de79e047e25d6e9f8cea71c86b4a53aced64134f0f003bbcbf3655fd172c8", size = 4701544, upload-time = "2026-04-24T19:53:24.356Z" },
{ url = "https://files.pythonhosted.org/packages/22/22/c8ddc25de3010fc8da447648f5a092c40e7a8fadf01dd6d255d9c0b9373d/cryptography-47.0.0-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef6b3634087f18d2155b1e8ce264e5345a753da2c5fa9815e7d41315c90f8318", size = 4783536, upload-time = "2026-04-24T19:53:26.665Z" },
{ url = "https://files.pythonhosted.org/packages/66/b6/d4a68f4ea999c6d89e8498579cba1c5fcba4276284de7773b17e4fa69293/cryptography-47.0.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:11dbb9f50a0f1bb9757b3d8c27c1101780efb8f0bdecfb12439c22a74d64c001", size = 4926106, upload-time = "2026-04-24T19:53:28.686Z" },
{ url = "https://files.pythonhosted.org/packages/54/ed/5f524db1fade9c013aa618e1c99c6ed05e8ffc9ceee6cda22fed22dda3f4/cryptography-47.0.0-cp311-abi3-win32.whl", hash = "sha256:7fda2f02c9015db3f42bb8a22324a454516ed10a8c29ca6ece6cdbb5efe2a203", size = 3258581, upload-time = "2026-04-24T19:53:31.058Z" },
{ url = "https://files.pythonhosted.org/packages/b2/dc/1b901990b174786569029f67542b3edf72ac068b6c3c8683c17e6a2f5363/cryptography-47.0.0-cp311-abi3-win_amd64.whl", hash = "sha256:f5c3296dab66202f1b18a91fa266be93d6aa0c2806ea3d67762c69f60adc71aa", size = 3775309, upload-time = "2026-04-24T19:53:33.054Z" },
{ url = "https://files.pythonhosted.org/packages/14/88/7aa18ad9c11bc87689affa5ce4368d884b517502d75739d475fc6f4a03c7/cryptography-47.0.0-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:be12cb6a204f77ed968bcefe68086eb061695b540a3dd05edac507a3111b25f0", size = 7904299, upload-time = "2026-04-24T19:53:35.003Z" },
{ url = "https://files.pythonhosted.org/packages/07/55/c18f75724544872f234678fdedc871391722cb34a2aee19faa9f63100bb2/cryptography-47.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2ebd84adf0728c039a3be2700289378e1c164afc6748df1a5ed456767bef9ba7", size = 4631180, upload-time = "2026-04-24T19:53:37.517Z" },
{ url = "https://files.pythonhosted.org/packages/ee/65/31a5cc0eaca99cec5bafffe155d407115d96136bb161e8b49e0ef73f09a7/cryptography-47.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f68d6fbc7fbbcfb0939fea72c3b96a9f9a6edfc0e1b1d29778a2066030418b1", size = 4653529, upload-time = "2026-04-24T19:53:39.775Z" },
{ url = "https://files.pythonhosted.org/packages/e5/bc/641c0519a495f3bfd0421b48d7cd325c4336578523ccd76ea322b6c29c7a/cryptography-47.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:6651d32eff255423503aa276739da98c30f26c40cbeffcc6048e0d54ef704c0c", size = 4638570, upload-time = "2026-04-24T19:53:42.129Z" },
{ url = "https://files.pythonhosted.org/packages/2b/f2/300327b0a47f6dc94dd8b71b57052aefe178bb51745073d73d80604f11ab/cryptography-47.0.0-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3fb8fa48075fad7193f2e5496135c6a76ac4b2aa5a38433df0a539296b377829", size = 5238019, upload-time = "2026-04-24T19:53:44.577Z" },
{ url = "https://files.pythonhosted.org/packages/e9/5a/5b5cf994391d4bf9d9c7efd4c66aabe4d95227256627f8fea6cff7dfadbd/cryptography-47.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:11438c7518132d95f354fa01a4aa2f806d172a061a7bed18cf18cbdacdb204d7", size = 4686832, upload-time = "2026-04-24T19:53:47.015Z" },
{ url = "https://files.pythonhosted.org/packages/dc/2c/ae950e28fd6475c852fc21a44db3e6b5bcc1261d1e370f2b6e42fa800fef/cryptography-47.0.0-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8c1a736bbb3288005796c3f7ccb9453360d7fed483b13b9f468aea5171432923", size = 4269301, upload-time = "2026-04-24T19:53:48.97Z" },
{ url = "https://files.pythonhosted.org/packages/67/fb/6a39782e150ffe5cc1b0018cb6ddc48bf7ca62b498d7539ffc8a758e977d/cryptography-47.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:f1557695e5c2b86e204f6ce9470497848634100787935ab7adc5397c54abd7ab", size = 4638110, upload-time = "2026-04-24T19:53:51.011Z" },
{ url = "https://files.pythonhosted.org/packages/8e/d7/0b3c71090a76e5c203164a47688b697635ece006dcd2499ab3a4dbd3f0bd/cryptography-47.0.0-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:f9a034b642b960767fb343766ae5ba6ad653f2e890ddd82955aef288ffea8736", size = 5194988, upload-time = "2026-04-24T19:53:52.962Z" },
{ url = "https://files.pythonhosted.org/packages/63/33/63a961498a9df51721ab578c5a2622661411fc520e00bd83b0cc64eb20c4/cryptography-47.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:b1c76fca783aa7698eb21eb14f9c4aa09452248ee54a627d125025a43f83e7a7", size = 4686563, upload-time = "2026-04-24T19:53:55.274Z" },
{ url = "https://files.pythonhosted.org/packages/b7/bf/5ee5b145248f92250de86145d1c1d6edebbd57a7fe7caa4dedb5d4cf06a1/cryptography-47.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4f7722c97826770bab8ae92959a2e7b20a5e9e9bf4deae68fd86c3ca457bab52", size = 4770094, upload-time = "2026-04-24T19:53:57.753Z" },
{ url = "https://files.pythonhosted.org/packages/92/43/21d220b2da5d517773894dacdcdb5c682c28d3fffce65548cb06e87d5501/cryptography-47.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:09f6d7bf6724f8db8b32f11eccf23efc8e759924bc5603800335cf8859a3ddbd", size = 4913811, upload-time = "2026-04-24T19:54:00.236Z" },
{ url = "https://files.pythonhosted.org/packages/31/98/dc4ad376ac5f1a1a7d4a83f7b0c6f2bcad36b5d2d8f30aeb482d3a7d9582/cryptography-47.0.0-cp314-cp314t-win32.whl", hash = "sha256:6eebcaf0df1d21ce1f90605c9b432dd2c4f4ab665ac29a40d5e3fc68f51b5e63", size = 3237158, upload-time = "2026-04-24T19:54:02.606Z" },
{ url = "https://files.pythonhosted.org/packages/bc/da/97f62d18306b5133468bc3f8cc73a3111e8cdc8cf8d3e69474d6e5fd2d1b/cryptography-47.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:51c9313e90bd1690ec5a75ed047c27c0b8e6c570029712943d6116ef9a90620b", size = 3758706, upload-time = "2026-04-24T19:54:04.433Z" },
{ url = "https://files.pythonhosted.org/packages/e0/34/a4fae8ae7c3bc227460c9ae43f56abf1b911da0ec29e0ebac53bb0a4b6b7/cryptography-47.0.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:14432c8a9bcb37009784f9594a62fae211a2ae9543e96c92b2a8e4c3cd5cd0c4", size = 7904072, upload-time = "2026-04-24T19:54:06.411Z" },
{ url = "https://files.pythonhosted.org/packages/01/64/d7b1e54fdb69f22d24a64bb3e88dc718b31c7fb10ef0b9691a3cf7eeea6e/cryptography-47.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:07efe86201817e7d3c18781ca9770bc0db04e1e48c994be384e4602bc38f8f27", size = 4635767, upload-time = "2026-04-24T19:54:08.519Z" },
{ url = "https://files.pythonhosted.org/packages/8b/7b/cca826391fb2a94efdcdfe4631eb69306ee1cff0b22f664a412c90713877/cryptography-47.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b45761c6ec22b7c726d6a829558777e32d0f1c8be7c3f3480f9c912d5ee8a10", size = 4654350, upload-time = "2026-04-24T19:54:10.795Z" },
{ url = "https://files.pythonhosted.org/packages/4c/65/4b57bcc823f42a991627c51c2f68c9fd6eb1393c1756aac876cba2accae2/cryptography-47.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:edd4da498015da5b9f26d38d3bfc2e90257bfa9cbed1f6767c282a0025ae649b", size = 4643394, upload-time = "2026-04-24T19:54:13.275Z" },
{ url = "https://files.pythonhosted.org/packages/f4/c4/2c5fbeea70adbbca2bbae865e1d605d6a4a7f8dbd9d33eaf69645087f06c/cryptography-47.0.0-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:9af828c0d5a65c70ec729cd7495a4bf1a67ecb66417b8f02ff125ab8a6326a74", size = 5225777, upload-time = "2026-04-24T19:54:15.18Z" },
{ url = "https://files.pythonhosted.org/packages/7e/b8/ac57107ef32749d2b244e36069bb688792a363aaaa3acc9e3cf84c130315/cryptography-47.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:256d07c78a04d6b276f5df935a9923275f53bd1522f214447fdf365494e2d515", size = 4688771, upload-time = "2026-04-24T19:54:17.835Z" },
{ url = "https://files.pythonhosted.org/packages/56/fc/9f1de22ff8be99d991f240a46863c52d475404c408886c5a38d2b5c3bb26/cryptography-47.0.0-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:5d0e362ff51041b0c0d219cc7d6924d7b8996f57ce5712bdcef71eb3c65a59cc", size = 4270753, upload-time = "2026-04-24T19:54:19.963Z" },
{ url = "https://files.pythonhosted.org/packages/00/68/d70c852797aa68e8e48d12e5a87170c43f67bb4a59403627259dd57d15de/cryptography-47.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1581aef4219f7ca2849d0250edaa3866212fb74bf5667284f46aa92f9e65c1ca", size = 4642911, upload-time = "2026-04-24T19:54:21.818Z" },
{ url = "https://files.pythonhosted.org/packages/a5/51/661cbee74f594c5d97ff82d34f10d5551c085ca4668645f4606ebd22bd5d/cryptography-47.0.0-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:a49a3eb5341b9503fa3000a9a0db033161db90d47285291f53c2a9d2cd1b7f76", size = 5181411, upload-time = "2026-04-24T19:54:24.376Z" },
{ url = "https://files.pythonhosted.org/packages/94/87/f2b6c374a82cf076cfa1416992ac8e8ec94d79facc37aec87c1a5cb72352/cryptography-47.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2207a498b03275d0051589e326b79d4cf59985c99031b05bb292ac52631c37fe", size = 4688262, upload-time = "2026-04-24T19:54:26.946Z" },
{ url = "https://files.pythonhosted.org/packages/14/e2/8b7462f4acf21ec509616f0245018bb197194ab0b65c2ea21a0bdd53c0eb/cryptography-47.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7a02675e2fabd0c0fc04c868b8781863cbf1967691543c22f5470500ff840b31", size = 4775506, upload-time = "2026-04-24T19:54:28.926Z" },
{ url = "https://files.pythonhosted.org/packages/70/75/158e494e4c08dc05e039da5bb48553826bd26c23930cf8d3cd5f21fa8921/cryptography-47.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80887c5cbd1774683cb126f0ab4184567f080071d5acf62205acb354b4b753b7", size = 4912060, upload-time = "2026-04-24T19:54:30.869Z" },
{ url = "https://files.pythonhosted.org/packages/06/bd/0a9d3edbf5eadbac926d7b9b3cd0c4be584eeeae4a003d24d9eda4affbbd/cryptography-47.0.0-cp38-abi3-win32.whl", hash = "sha256:ed67ea4e0cfb5faa5bc7ecb6e2b8838f3807a03758eec239d6c21c8769355310", size = 3248487, upload-time = "2026-04-24T19:54:33.494Z" },
{ url = "https://files.pythonhosted.org/packages/60/80/5681af756d0da3a599b7bdb586fac5a1540f1bcefd2717a20e611ddade45/cryptography-47.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:835d2d7f47cdc53b3224e90810fb1d36ca94ea29cc1801fb4c1bc43876735769", size = 3755737, upload-time = "2026-04-24T19:54:35.408Z" },
{ url = "https://files.pythonhosted.org/packages/4c/8c/44ee01267ec01e26e43ebfdae3f120ec2312aa72fa4c0507ebe41a26739f/cryptography-46.0.1-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:1cd6d50c1a8b79af1a6f703709d8973845f677c8e97b1268f5ff323d38ce8475", size = 7285044, upload-time = "2025-09-17T00:08:36.807Z" },
{ url = "https://files.pythonhosted.org/packages/22/59/9ae689a25047e0601adfcb159ec4f83c0b4149fdb5c3030cc94cd218141d/cryptography-46.0.1-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0ff483716be32690c14636e54a1f6e2e1b7bf8e22ca50b989f88fa1b2d287080", size = 4308182, upload-time = "2025-09-17T00:08:39.388Z" },
{ url = "https://files.pythonhosted.org/packages/c4/ee/ca6cc9df7118f2fcd142c76b1da0f14340d77518c05b1ebfbbabca6b9e7d/cryptography-46.0.1-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9873bf7c1f2a6330bdfe8621e7ce64b725784f9f0c3a6a55c3047af5849f920e", size = 4572393, upload-time = "2025-09-17T00:08:41.663Z" },
{ url = "https://files.pythonhosted.org/packages/7f/a3/0f5296f63815d8e985922b05c31f77ce44787b3127a67c0b7f70f115c45f/cryptography-46.0.1-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0dfb7c88d4462a0cfdd0d87a3c245a7bc3feb59de101f6ff88194f740f72eda6", size = 4308400, upload-time = "2025-09-17T00:08:43.559Z" },
{ url = "https://files.pythonhosted.org/packages/5d/8c/74fcda3e4e01be1d32775d5b4dd841acaac3c1b8fa4d0774c7ac8d52463d/cryptography-46.0.1-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e22801b61613ebdebf7deb18b507919e107547a1d39a3b57f5f855032dd7cfb8", size = 4015786, upload-time = "2025-09-17T00:08:45.758Z" },
{ url = "https://files.pythonhosted.org/packages/dc/b8/85d23287baeef273b0834481a3dd55bbed3a53587e3b8d9f0898235b8f91/cryptography-46.0.1-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:757af4f6341ce7a1e47c326ca2a81f41d236070217e5fbbad61bbfe299d55d28", size = 4982606, upload-time = "2025-09-17T00:08:47.602Z" },
{ url = "https://files.pythonhosted.org/packages/e5/d3/de61ad5b52433b389afca0bc70f02a7a1f074651221f599ce368da0fe437/cryptography-46.0.1-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f7a24ea78de345cfa7f6a8d3bde8b242c7fac27f2bd78fa23474ca38dfaeeab9", size = 4604234, upload-time = "2025-09-17T00:08:49.879Z" },
{ url = "https://files.pythonhosted.org/packages/dc/1f/dbd4d6570d84748439237a7478d124ee0134bf166ad129267b7ed8ea6d22/cryptography-46.0.1-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e8776dac9e660c22241b6587fae51a67b4b0147daa4d176b172c3ff768ad736", size = 4307669, upload-time = "2025-09-17T00:08:52.321Z" },
{ url = "https://files.pythonhosted.org/packages/ec/fd/ca0a14ce7f0bfe92fa727aacaf2217eb25eb7e4ed513b14d8e03b26e63ed/cryptography-46.0.1-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9f40642a140c0c8649987027867242b801486865277cbabc8c6059ddef16dc8b", size = 4947579, upload-time = "2025-09-17T00:08:54.697Z" },
{ url = "https://files.pythonhosted.org/packages/89/6b/09c30543bb93401f6f88fce556b3bdbb21e55ae14912c04b7bf355f5f96c/cryptography-46.0.1-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:449ef2b321bec7d97ef2c944173275ebdab78f3abdd005400cc409e27cd159ab", size = 4603669, upload-time = "2025-09-17T00:08:57.16Z" },
{ url = "https://files.pythonhosted.org/packages/23/9a/38cb01cb09ce0adceda9fc627c9cf98eb890fc8d50cacbe79b011df20f8a/cryptography-46.0.1-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2dd339ba3345b908fa3141ddba4025568fa6fd398eabce3ef72a29ac2d73ad75", size = 4435828, upload-time = "2025-09-17T00:08:59.606Z" },
{ url = "https://files.pythonhosted.org/packages/0f/53/435b5c36a78d06ae0bef96d666209b0ecd8f8181bfe4dda46536705df59e/cryptography-46.0.1-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7411c910fb2a412053cf33cfad0153ee20d27e256c6c3f14d7d7d1d9fec59fd5", size = 4709553, upload-time = "2025-09-17T00:09:01.832Z" },
{ url = "https://files.pythonhosted.org/packages/f5/c4/0da6e55595d9b9cd3b6eb5dc22f3a07ded7f116a3ea72629cab595abb804/cryptography-46.0.1-cp311-abi3-win32.whl", hash = "sha256:cbb8e769d4cac884bb28e3ff620ef1001b75588a5c83c9c9f1fdc9afbe7f29b0", size = 3058327, upload-time = "2025-09-17T00:09:03.726Z" },
{ url = "https://files.pythonhosted.org/packages/95/0f/cd29a35e0d6e78a0ee61793564c8cff0929c38391cb0de27627bdc7525aa/cryptography-46.0.1-cp311-abi3-win_amd64.whl", hash = "sha256:92e8cfe8bd7dd86eac0a677499894862cd5cc2fd74de917daa881d00871ac8e7", size = 3523893, upload-time = "2025-09-17T00:09:06.272Z" },
{ url = "https://files.pythonhosted.org/packages/f2/dd/eea390f3e78432bc3d2f53952375f8b37cb4d37783e626faa6a51e751719/cryptography-46.0.1-cp311-abi3-win_arm64.whl", hash = "sha256:db5597a4c7353b2e5fb05a8e6cb74b56a4658a2b7bf3cb6b1821ae7e7fd6eaa0", size = 2932145, upload-time = "2025-09-17T00:09:08.568Z" },
{ url = "https://files.pythonhosted.org/packages/0a/fb/c73588561afcd5e24b089952bd210b14676c0c5bf1213376350ae111945c/cryptography-46.0.1-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:4c49eda9a23019e11d32a0eb51a27b3e7ddedde91e099c0ac6373e3aacc0d2ee", size = 7193928, upload-time = "2025-09-17T00:09:10.595Z" },
{ url = "https://files.pythonhosted.org/packages/26/34/0ff0bb2d2c79f25a2a63109f3b76b9108a906dd2a2eb5c1d460b9938adbb/cryptography-46.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9babb7818fdd71394e576cf26c5452df77a355eac1a27ddfa24096665a27f8fd", size = 4293515, upload-time = "2025-09-17T00:09:12.861Z" },
{ url = "https://files.pythonhosted.org/packages/df/b7/d4f848aee24ecd1be01db6c42c4a270069a4f02a105d9c57e143daf6cf0f/cryptography-46.0.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9f2c4cc63be3ef43c0221861177cee5d14b505cd4d4599a89e2cd273c4d3542a", size = 4545619, upload-time = "2025-09-17T00:09:15.397Z" },
{ url = "https://files.pythonhosted.org/packages/44/a5/42fedefc754fd1901e2d95a69815ea4ec8a9eed31f4c4361fcab80288661/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:41c281a74df173876da1dc9a9b6953d387f06e3d3ed9284e3baae3ab3f40883a", size = 4299160, upload-time = "2025-09-17T00:09:17.155Z" },
{ url = "https://files.pythonhosted.org/packages/86/a1/cd21174f56e769c831fbbd6399a1b7519b0ff6280acec1b826d7b072640c/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0a17377fa52563d730248ba1f68185461fff36e8bc75d8787a7dd2e20a802b7a", size = 3994491, upload-time = "2025-09-17T00:09:18.971Z" },
{ url = "https://files.pythonhosted.org/packages/8d/2f/a8cbfa1c029987ddc746fd966711d4fa71efc891d37fbe9f030fe5ab4eec/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:0d1922d9280e08cde90b518a10cd66831f632960a8d08cb3418922d83fce6f12", size = 4960157, upload-time = "2025-09-17T00:09:20.923Z" },
{ url = "https://files.pythonhosted.org/packages/67/ae/63a84e6789e0d5a2502edf06b552bcb0fa9ff16147265d5c44a211942abe/cryptography-46.0.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:af84e8e99f1a82cea149e253014ea9dc89f75b82c87bb6c7242203186f465129", size = 4577263, upload-time = "2025-09-17T00:09:23.356Z" },
{ url = "https://files.pythonhosted.org/packages/ef/8f/1b9fa8e92bd9cbcb3b7e1e593a5232f2c1e6f9bd72b919c1a6b37d315f92/cryptography-46.0.1-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:ef648d2c690703501714588b2ba640facd50fd16548133b11b2859e8655a69da", size = 4298703, upload-time = "2025-09-17T00:09:25.566Z" },
{ url = "https://files.pythonhosted.org/packages/c3/af/bb95db070e73fea3fae31d8a69ac1463d89d1c084220f549b00dd01094a8/cryptography-46.0.1-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:e94eb5fa32a8a9f9bf991f424f002913e3dd7c699ef552db9b14ba6a76a6313b", size = 4926363, upload-time = "2025-09-17T00:09:27.451Z" },
{ url = "https://files.pythonhosted.org/packages/f5/3b/d8fb17ffeb3a83157a1cc0aa5c60691d062aceecba09c2e5e77ebfc1870c/cryptography-46.0.1-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:534b96c0831855e29fc3b069b085fd185aa5353033631a585d5cd4dd5d40d657", size = 4576958, upload-time = "2025-09-17T00:09:29.924Z" },
{ url = "https://files.pythonhosted.org/packages/d9/46/86bc3a05c10c8aa88c8ae7e953a8b4e407c57823ed201dbcba55c4d655f4/cryptography-46.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9b55038b5c6c47559aa33626d8ecd092f354e23de3c6975e4bb205df128a2a0", size = 4422507, upload-time = "2025-09-17T00:09:32.222Z" },
{ url = "https://files.pythonhosted.org/packages/a8/4e/387e5a21dfd2b4198e74968a541cfd6128f66f8ec94ed971776e15091ac3/cryptography-46.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ec13b7105117dbc9afd023300fb9954d72ca855c274fe563e72428ece10191c0", size = 4683964, upload-time = "2025-09-17T00:09:34.118Z" },
{ url = "https://files.pythonhosted.org/packages/25/a3/f9f5907b166adb8f26762071474b38bbfcf89858a5282f032899075a38a1/cryptography-46.0.1-cp314-cp314t-win32.whl", hash = "sha256:504e464944f2c003a0785b81668fe23c06f3b037e9cb9f68a7c672246319f277", size = 3029705, upload-time = "2025-09-17T00:09:36.381Z" },
{ url = "https://files.pythonhosted.org/packages/12/66/4d3a4f1850db2e71c2b1628d14b70b5e4c1684a1bd462f7fffb93c041c38/cryptography-46.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c52fded6383f7e20eaf70a60aeddd796b3677c3ad2922c801be330db62778e05", size = 3502175, upload-time = "2025-09-17T00:09:38.261Z" },
{ url = "https://files.pythonhosted.org/packages/52/c7/9f10ad91435ef7d0d99a0b93c4360bea3df18050ff5b9038c489c31ac2f5/cryptography-46.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:9495d78f52c804b5ec8878b5b8c7873aa8e63db9cd9ee387ff2db3fffe4df784", size = 2912354, upload-time = "2025-09-17T00:09:40.078Z" },
{ url = "https://files.pythonhosted.org/packages/98/e5/fbd632385542a3311915976f88e0dfcf09e62a3fc0aff86fb6762162a24d/cryptography-46.0.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d84c40bdb8674c29fa192373498b6cb1e84f882889d21a471b45d1f868d8d44b", size = 7255677, upload-time = "2025-09-17T00:09:42.407Z" },
{ url = "https://files.pythonhosted.org/packages/56/3e/13ce6eab9ad6eba1b15a7bd476f005a4c1b3f299f4c2f32b22408b0edccf/cryptography-46.0.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ed64e5083fa806709e74fc5ea067dfef9090e5b7a2320a49be3c9df3583a2d8", size = 4301110, upload-time = "2025-09-17T00:09:45.614Z" },
{ url = "https://files.pythonhosted.org/packages/a2/67/65dc233c1ddd688073cf7b136b06ff4b84bf517ba5529607c9d79720fc67/cryptography-46.0.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:341fb7a26bc9d6093c1b124b9f13acc283d2d51da440b98b55ab3f79f2522ead", size = 4562369, upload-time = "2025-09-17T00:09:47.601Z" },
{ url = "https://files.pythonhosted.org/packages/17/db/d64ae4c6f4e98c3dac5bf35dd4d103f4c7c345703e43560113e5e8e31b2b/cryptography-46.0.1-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6ef1488967e729948d424d09c94753d0167ce59afba8d0f6c07a22b629c557b2", size = 4302126, upload-time = "2025-09-17T00:09:49.335Z" },
{ url = "https://files.pythonhosted.org/packages/3d/19/5f1eea17d4805ebdc2e685b7b02800c4f63f3dd46cfa8d4c18373fea46c8/cryptography-46.0.1-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7823bc7cdf0b747ecfb096d004cc41573c2f5c7e3a29861603a2871b43d3ef32", size = 4009431, upload-time = "2025-09-17T00:09:51.239Z" },
{ url = "https://files.pythonhosted.org/packages/81/b5/229ba6088fe7abccbfe4c5edb96c7a5ad547fac5fdd0d40aa6ea540b2985/cryptography-46.0.1-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:f736ab8036796f5a119ff8211deda416f8c15ce03776db704a7a4e17381cb2ef", size = 4980739, upload-time = "2025-09-17T00:09:54.181Z" },
{ url = "https://files.pythonhosted.org/packages/3a/9c/50aa38907b201e74bc43c572f9603fa82b58e831bd13c245613a23cff736/cryptography-46.0.1-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e46710a240a41d594953012213ea8ca398cd2448fbc5d0f1be8160b5511104a0", size = 4592289, upload-time = "2025-09-17T00:09:56.731Z" },
{ url = "https://files.pythonhosted.org/packages/5a/33/229858f8a5bb22f82468bb285e9f4c44a31978d5f5830bb4ea1cf8a4e454/cryptography-46.0.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:84ef1f145de5aee82ea2447224dc23f065ff4cc5791bb3b506615957a6ba8128", size = 4301815, upload-time = "2025-09-17T00:09:58.548Z" },
{ url = "https://files.pythonhosted.org/packages/52/cb/b76b2c87fbd6ed4a231884bea3ce073406ba8e2dae9defad910d33cbf408/cryptography-46.0.1-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9394c7d5a7565ac5f7d9ba38b2617448eba384d7b107b262d63890079fad77ca", size = 4943251, upload-time = "2025-09-17T00:10:00.475Z" },
{ url = "https://files.pythonhosted.org/packages/94/0f/f66125ecf88e4cb5b8017ff43f3a87ede2d064cb54a1c5893f9da9d65093/cryptography-46.0.1-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ed957044e368ed295257ae3d212b95456bd9756df490e1ac4538857f67531fcc", size = 4591247, upload-time = "2025-09-17T00:10:02.874Z" },
{ url = "https://files.pythonhosted.org/packages/f6/22/9f3134ae436b63b463cfdf0ff506a0570da6873adb4bf8c19b8a5b4bac64/cryptography-46.0.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f7de12fa0eee6234de9a9ce0ffcfa6ce97361db7a50b09b65c63ac58e5f22fc7", size = 4428534, upload-time = "2025-09-17T00:10:04.994Z" },
{ url = "https://files.pythonhosted.org/packages/89/39/e6042bcb2638650b0005c752c38ea830cbfbcbb1830e4d64d530000aa8dc/cryptography-46.0.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7fab1187b6c6b2f11a326f33b036f7168f5b996aedd0c059f9738915e4e8f53a", size = 4699541, upload-time = "2025-09-17T00:10:06.925Z" },
{ url = "https://files.pythonhosted.org/packages/68/46/753d457492d15458c7b5a653fc9a84a1c9c7a83af6ebdc94c3fc373ca6e8/cryptography-46.0.1-cp38-abi3-win32.whl", hash = "sha256:45f790934ac1018adeba46a0f7289b2b8fe76ba774a88c7f1922213a56c98bc1", size = 3043779, upload-time = "2025-09-17T00:10:08.951Z" },
{ url = "https://files.pythonhosted.org/packages/2f/50/b6f3b540c2f6ee712feeb5fa780bb11fad76634e71334718568e7695cb55/cryptography-46.0.1-cp38-abi3-win_amd64.whl", hash = "sha256:7176a5ab56fac98d706921f6416a05e5aff7df0e4b91516f450f8627cda22af3", size = 3517226, upload-time = "2025-09-17T00:10:10.769Z" },
{ url = "https://files.pythonhosted.org/packages/ff/e8/77d17d00981cdd27cc493e81e1749a0b8bbfb843780dbd841e30d7f50743/cryptography-46.0.1-cp38-abi3-win_arm64.whl", hash = "sha256:efc9e51c3e595267ff84adf56e9b357db89ab2279d7e375ffcaf8f678606f3d9", size = 2923149, upload-time = "2025-09-17T00:10:13.236Z" },
]

[[package]]

Generated +60 -60
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand.

[[package]]
name = "about-time"
@@ -2029,61 +2029,61 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""]

[[package]]
name = "cryptography"
version = "46.0.7"
version = "46.0.6"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
groups = ["main", "dev"]
files = [
{file = "cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb"},
{file = "cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b"},
{file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85"},
{file = "cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e"},
{file = "cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457"},
{file = "cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b"},
{file = "cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842"},
{file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298"},
|
||||
{file = "cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246"},
|
||||
{file = "cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3"},
|
||||
{file = "cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f"},
|
||||
{file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15"},
|
||||
{file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455"},
|
||||
{file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65"},
|
||||
{file = "cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968"},
|
||||
{file = "cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4"},
|
||||
{file = "cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e"},
|
||||
{file = "cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2097,7 +2097,7 @@ nox = ["nox[uv] (>=2024.4.15)"]
|
||||
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
|
||||
sdist = ["build (>=1.0.0)"]
|
||||
ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
|
||||
[[package]]
|
||||
@@ -5445,25 +5445,25 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.33.1"
|
||||
version = "2.32.4"
|
||||
description = "Python HTTP for Humans."
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"},
|
||||
{file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"},
|
||||
{file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"},
|
||||
{file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2023.5.7"
|
||||
certifi = ">=2017.4.17"
|
||||
charset_normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.26,<3"
|
||||
urllib3 = ">=1.21.1,<3"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<8)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "requests-file"
|
||||
@@ -6735,4 +6735,4 @@ files = [
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.13"
|
||||
content-hash = "d7e2ad41783a864bb845f63ccc10c88ae1e4ac36d61993ea106bbb4a5f58a843"
|
||||
content-hash = "09ce4507a464b318702ed8c6a738f3bb1bc4cc6ff5a50a9c2884f560af9ab034"
|
||||
|
||||
+2 -24
@@ -9,45 +9,23 @@ All notable changes to the **Prowler SDK** are documented in this file.
- `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844)
- Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
- ASD Essential Eight Maturity Model compliance framework for AWS (Maturity Level One, Nov 2023) [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808)
- Update Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663)
- `bedrock_prompt_management_exists` check for AWS provider [(#10878)](https://github.com/prowler-cloud/prowler/pull/10878)

### 🔄 Changed

- `route53_dangling_ip_subdomain_takeover` now also flags `CNAME` records pointing to S3 website endpoints whose buckets are missing from the account [(#10920)](https://github.com/prowler-cloud/prowler/pull/10920)
- Azure Network Watcher flow log checks now require workspace-backed Traffic Analytics for `network_flow_log_captured_sent` and align metadata with VNet-compatible flow log guidance [(#10645)](https://github.com/prowler-cloud/prowler/pull/10645)
-- Azure compliance entries for legacy Network Watcher flow log controls now use retirement-aware guidance and point new deployments to VNet flow logs [(#10937)](https://github.com/prowler-cloud/prowler/pull/10937)
+- Azure compliance entries for legacy Network Watcher flow log controls now use retirement-aware guidance and point new deployments to VNet flow logs
- AWS CodeBuild service now batches `BatchGetProjects` and `BatchGetBuilds` calls per region (up to 100 items per call) to reduce API call volume and prevent throttling-induced false positives in `codebuild_project_not_publicly_accessible` [(#10639)](https://github.com/prowler-cloud/prowler/pull/10639)
- `display_compliance_table` dispatch switched from substring `in` checks to `startswith` to prevent false matches between similarly named frameworks (e.g. `cisa` vs `cis`) [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
- Restore the `ec2-imdsv1` category for EC2 IMDS checks to keep Attack Surface and findings filters aligned [(#10998)](https://github.com/prowler-cloud/prowler/pull/10998)

### 🐞 Fixed

- AWS SDK test isolation: autouse `mock_aws` fixture and leak detector in `conftest.py` to prevent tests from hitting real AWS endpoints, with idempotent organization setup for tests calling `set_mocked_aws_provider` multiple times [(#10605)](https://github.com/prowler-cloud/prowler/pull/10605)
- AWS `boto` user agent extra is now applied to every client [(#10944)](https://github.com/prowler-cloud/prowler/pull/10944)
- Image provider connection check no longer fails with a misleading `host='https'` resolution error when the registry URL includes an `http://` or `https://` scheme prefix [(#10950)](https://github.com/prowler-cloud/prowler/pull/10950)
- Azure subscriptions sharing the same display name are no longer collapsed into a single identity entry, so every subscription is scanned [(#10718)](https://github.com/prowler-cloud/prowler/pull/10718)

### 🔐 Security

- Parser-mismatch SSRF in image provider registry auth where crafted bearer-token realms and pagination links could force requests to internal addresses and leak credentials cross-origin [(#10945)](https://github.com/prowler-cloud/prowler/pull/10945)
- `cryptography` from 46.0.6 to 46.0.7 and `trivy` binary from 0.69.2 to 0.70.0 in the SDK image for CVE-2026-39892 and CVE-2026-33186 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)

## [5.25.3] (Prowler UNRELEASED)

### 🐞 Fixed

- Oracle Cloud identity scans now scan known or supplied regions to better support non-Ashburn tenancies [(#10529)](https://github.com/prowler-cloud/prowler/pull/10529)

---

## [5.25.2] (Prowler v5.25.2)

### 🐞 Fixed

- `route53_dangling_ip_subdomain_takeover` now also flags `CNAME` records pointing to S3 website endpoints whose buckets are missing from the account [(#10920)](https://github.com/prowler-cloud/prowler/pull/10920)
- Duplicate Kubernetes RBAC findings when the same User or Group subject appeared in multiple ClusterRoleBindings [(#10242)](https://github.com/prowler-cloud/prowler/pull/10242)
- Match K8s RBAC rules by `apiGroup` [(#10969)](https://github.com/prowler-cloud/prowler/pull/10969)
- Return a compact actor name from CloudTrail `userIdentity` events [(#10986)](https://github.com/prowler-cloud/prowler/pull/10986)

---
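The CodeBuild entry above relies on `BatchGetProjects` and `BatchGetBuilds` accepting up to 100 items per call. A minimal sketch of that chunking pattern, assuming a boto3 CodeBuild client and a single region (the client setup and variable names here are illustrative, not Prowler's actual service code):

```python
import boto3

# Illustrative only: collect every project name in one region, then fetch
# details in chunks of 100, the BatchGetProjects per-call maximum.
client = boto3.client("codebuild", region_name="us-east-1")

project_names = []
for page in client.get_paginator("list_projects").paginate():
    project_names.extend(page["projects"])

projects = []
for start in range(0, len(project_names), 100):
    response = client.batch_get_projects(names=project_names[start : start + 100])
    projects.extend(response["projects"])
```

One request per 100 projects, instead of one per project, keeps the call volume low enough to avoid the throttling that was producing false positives.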
+7 -7
@@ -57,9 +57,6 @@ from prowler.lib.check.models import CheckMetadata
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.asff.asff import ASFF
-from prowler.lib.outputs.compliance.asd_essential_eight.asd_essential_eight_aws import (
-    ASDEssentialEightAWS,
-)
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
    AWSWellArchitected,
)
@@ -93,6 +90,9 @@ from prowler.lib.outputs.compliance.csa.csa_oraclecloud import OracleCloudCSA
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
+from prowler.lib.outputs.compliance.essential_eight.essential_eight_aws import (
+    EssentialEightAWS,
+)
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
@@ -676,18 +676,18 @@ def prowler():
            )
            generated_outputs["compliance"].append(cis)
            cis.batch_write_data_to_file()
-        elif compliance_name.startswith("asd_essential_eight"):
+        elif compliance_name.startswith("essential_eight"):
            filename = (
                f"{output_options.output_directory}/compliance/"
                f"{output_options.output_filename}_{compliance_name}.csv"
            )
-            asd_essential_eight = ASDEssentialEightAWS(
+            essential_eight = EssentialEightAWS(
                findings=finding_outputs,
                compliance=bulk_compliance_frameworks[compliance_name],
                file_path=filename,
            )
-            generated_outputs["compliance"].append(asd_essential_eight)
-            asd_essential_eight.batch_write_data_to_file()
+            generated_outputs["compliance"].append(essential_eight)
+            essential_eight.batch_write_data_to_file()
        elif compliance_name == "mitre_attack_aws":
            # Generate MITRE ATT&CK Finding Object
            filename = (
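The hunk above renames the prefix key passed to `startswith`; the changelog's related entry explains why prefix matching is used for dispatch at all: substring membership lets one framework's name match inside another's. A small sketch of the failure mode, with hypothetical framework names:

```python
# Hypothetical framework names; "cis" is a substring of "cisa_aws_2024".
compliance_name = "cisa_aws_2024"

print("cis" in compliance_name)            # True  -> CISA wrongly routed to CIS
print(compliance_name.startswith("cis_"))  # False -> prefix check rejects it
print("cis_1.5_aws".startswith("cis_"))    # True  -> a real CIS name still matches
```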
+1 -1
@@ -1,5 +1,5 @@
{
-    "Framework": "ASD-Essential-Eight",
+    "Framework": "Essential-Eight",
    "Name": "ASD Essential Eight Maturity Model - Maturity Level One (AWS)",
    "Version": "Nov 2023",
    "Provider": "AWS",

@@ -2897,7 +2897,6 @@
    "bedrock_guardrails_configured",
    "bedrock_model_invocation_logging_enabled",
    "bedrock_model_invocation_logs_encryption_enabled",
-    "bedrock_prompt_management_exists",
    "cloudformation_stack_outputs_find_secrets",
    "cloudfront_distributions_custom_ssl_certificate",
    "cloudfront_distributions_default_root_object",

@@ -2901,7 +2901,6 @@
    "bedrock_guardrails_configured",
    "bedrock_model_invocation_logging_enabled",
    "bedrock_model_invocation_logs_encryption_enabled",
-    "bedrock_prompt_management_exists",
    "cloudformation_stack_outputs_find_secrets",
    "cloudfront_distributions_custom_ssl_certificate",
    "cloudfront_distributions_default_root_object",
@@ -749,11 +749,8 @@ def execute(
            if global_provider.type == "cloudflare":
                is_finding_muted_args["account_id"] = finding.account_id
            if global_provider.type == "azure":
-                is_finding_muted_args["subscription_id"] = finding.subscription
-                is_finding_muted_args["subscription_name"] = (
-                    global_provider.identity.subscriptions.get(
-                        finding.subscription, finding.subscription
-                    )
-                )
+                is_finding_muted_args["subscription_id"] = (
+                    global_provider.identity.subscriptions.get(finding.subscription)
+                )
            is_finding_muted_args["finding"] = finding
            finding.muted = global_provider.mutelist.is_finding_muted(
@@ -102,7 +102,7 @@ class CIS_Requirement_Attribute(BaseModel):
    References: str


-class ASDEssentialEight_Requirement_Attribute_MaturityLevel(str, Enum):
+class EssentialEight_Requirement_Attribute_MaturityLevel(str, Enum):
    """ASD Essential Eight Maturity Level"""

    ML1 = "ML1"
@@ -110,14 +110,14 @@ class ASDEssentialEight_Requirement_Attribute_MaturityLevel(str, Enum):
    ML3 = "ML3"


-class ASDEssentialEight_Requirement_Attribute_AssessmentStatus(str, Enum):
+class EssentialEight_Requirement_Attribute_AssessmentStatus(str, Enum):
    """Essential Eight Requirement Attribute Assessment Status"""

    Manual = "Manual"
    Automated = "Automated"


-class ASDEssentialEight_Requirement_Attribute_CloudApplicability(str, Enum):
+class EssentialEight_Requirement_Attribute_CloudApplicability(str, Enum):
    """How well the ASD control maps to AWS cloud infrastructure."""

    Full = "full"
@@ -127,13 +127,13 @@ class ASDEssentialEight_Requirement_Attribute_CloudApplicability(str, Enum):


# Essential Eight Requirement Attribute
-class ASDEssentialEight_Requirement_Attribute(BaseModel):
+class EssentialEight_Requirement_Attribute(BaseModel):
    """ASD Essential Eight Requirement Attribute"""

    Section: str
-    MaturityLevel: ASDEssentialEight_Requirement_Attribute_MaturityLevel
-    AssessmentStatus: ASDEssentialEight_Requirement_Attribute_AssessmentStatus
-    CloudApplicability: ASDEssentialEight_Requirement_Attribute_CloudApplicability
+    MaturityLevel: EssentialEight_Requirement_Attribute_MaturityLevel
+    AssessmentStatus: EssentialEight_Requirement_Attribute_AssessmentStatus
+    CloudApplicability: EssentialEight_Requirement_Attribute_CloudApplicability
    MitigatedThreats: list[str]
    Description: str
    RationaleStatement: str
@@ -292,7 +292,7 @@ class Compliance_Requirement(BaseModel):
    Name: Optional[str] = None
    Attributes: list[
        Union[
-            ASDEssentialEight_Requirement_Attribute,
+            EssentialEight_Requirement_Attribute,
            CIS_Requirement_Attribute,
            ENS_Requirement_Attribute,
            ISO27001_2013_Requirement_Attribute,
+12 -16
@@ -62,9 +62,6 @@ VALID_CATEGORIES = frozenset(
        "e5",
        "privilege-escalation",
-        "ec2-imdsv1",
        "vercel-hobby-plan",
        "vercel-pro-plan",
        "vercel-enterprise-plan",
    }
)
@@ -247,15 +244,14 @@ class CheckMetadata(BaseModel):
    # store the compliance later if supplied
    Compliance: Optional[list[Any]] = Field(default_factory=list)

-    # TODO: Remove noqa and fix cls vulture errors
    @validator("Categories", each_item=True, pre=True, always=True)
-    def valid_category(cls, value, values):  # noqa: F841
+    def valid_category(cls, value, values):
        if not isinstance(value, str):
            raise ValueError("Categories must be a list of strings")
        value_lower = value.lower()
        if not re.match("^[a-z0-9-]+$", value_lower):
            raise ValueError(
-                f"Invalid category: {value}. Categories can only contain lowercase letters, numbers, and hyphen '-'"
+                f"Invalid category: {value}. Categories can only contain lowercase letters, numbers and hyphen '-'"
            )
        if (
            value_lower not in VALID_CATEGORIES
@@ -283,7 +279,7 @@ class CheckMetadata(BaseModel):
        return resource_type

    @validator("ServiceName", pre=True, always=True)
-    def validate_service_name(cls, service_name, values):  # noqa: F841
+    def validate_service_name(cls, service_name, values):
        if not service_name:
            raise ValueError("ServiceName must be a non-empty string")

@@ -300,7 +296,7 @@ class CheckMetadata(BaseModel):
        return service_name

    @validator("CheckID", pre=True, always=True)
-    def valid_check_id(cls, check_id, values):  # noqa: F841
+    def valid_check_id(cls, check_id, values):
        if not check_id:
            raise ValueError("CheckID must be a non-empty string")

@@ -313,7 +309,7 @@ class CheckMetadata(BaseModel):
        return check_id

    @validator("CheckTitle", pre=True, always=True)
-    def validate_check_title(cls, check_title, values):  # noqa: F841
+    def validate_check_title(cls, check_title, values):
        if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS:
            if len(check_title) > 150:
                raise ValueError(
@@ -326,13 +322,13 @@ class CheckMetadata(BaseModel):
        return check_title

    @validator("RelatedUrl", pre=True, always=True)
-    def validate_related_url(cls, related_url, values):  # noqa: F841
+    def validate_related_url(cls, related_url, values):
        if related_url and values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS:
            raise ValueError("RelatedUrl must be empty. This field is deprecated.")
        return related_url

    @validator("Remediation")
-    def validate_recommendation_url(cls, remediation, values):  # noqa: F841
+    def validate_recommendation_url(cls, remediation, values):
        if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS:
            url = remediation.Recommendation.Url
            if url and not url.startswith("https://hub.prowler.com/"):
@@ -342,7 +338,7 @@ class CheckMetadata(BaseModel):
        return remediation

    @validator("CheckType", pre=True, always=True)
-    def validate_check_type(cls, check_type, values):  # noqa: F841
+    def validate_check_type(cls, check_type, values):
        provider = values.get("Provider", "").lower()

        # Non-AWS providers must have an empty CheckType list
@@ -371,7 +367,7 @@ class CheckMetadata(BaseModel):
        return check_type

    @validator("Description", pre=True, always=True)
-    def validate_description(cls, description, values):  # noqa: F841
+    def validate_description(cls, description, values):
        if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS:
            if len(description) > 400:
                raise ValueError(
@@ -380,7 +376,7 @@ class CheckMetadata(BaseModel):
        return description

    @validator("Risk", pre=True, always=True)
-    def validate_risk(cls, risk, values):  # noqa: F841
+    def validate_risk(cls, risk, values):
        if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS:
            if len(risk) > 400:
                raise ValueError(
@@ -389,7 +385,7 @@ class CheckMetadata(BaseModel):
        return risk

    @validator("ResourceGroup", pre=True, always=True)
-    def validate_resource_group(cls, resource_group):  # noqa: F841
+    def validate_resource_group(cls, resource_group):
        if resource_group and resource_group not in VALID_RESOURCE_GROUPS:
            raise ValueError(
                f"Invalid ResourceGroup: '{resource_group}'. Must be one of: {', '.join(sorted(VALID_RESOURCE_GROUPS))} or empty string."
@@ -397,7 +393,7 @@ class CheckMetadata(BaseModel):
        return resource_group

    @validator("AdditionalURLs", pre=True, always=True)
-    def validate_additional_urls(cls, additional_urls):  # noqa: F841
+    def validate_additional_urls(cls, additional_urls):
        if not isinstance(additional_urls, list):
            raise ValueError("AdditionalURLs must be a list")
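For readers unfamiliar with the pydantic v1 `@validator` signatures touched above: `each_item=True` runs the function once per list element, and `pre=True, always=True` runs it before coercion, even for defaulted fields. A self-contained sketch under those assumptions, using the `pydantic.v1` compatibility layer that the diffed models import (the model and category set here are hypothetical, not Prowler's):

```python
import re

from pydantic.v1 import BaseModel, validator

VALID_CATEGORIES = frozenset({"secrets", "gen-ai"})  # hypothetical subset


class Metadata(BaseModel):
    Categories: list = []

    @validator("Categories", each_item=True, pre=True, always=True)
    def valid_category(cls, value):
        # Called once per list element because of each_item=True.
        if not isinstance(value, str):
            raise ValueError("Categories must be a list of strings")
        value_lower = value.lower()
        if not re.match("^[a-z0-9-]+$", value_lower):
            raise ValueError(f"Invalid category: {value}")
        if value_lower not in VALID_CATEGORIES:
            raise ValueError(f"Unknown category: {value}")
        return value_lower


print(Metadata(Categories=["Secrets"]).Categories)  # ['secrets']
```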
@@ -1,9 +1,6 @@
import sys

from prowler.lib.logger import logger
-from prowler.lib.outputs.compliance.asd_essential_eight.asd_essential_eight import (
-    get_asd_essential_eight_table,
-)
from prowler.lib.outputs.compliance.c5.c5 import get_c5_table
from prowler.lib.outputs.compliance.ccc.ccc import get_ccc_table
from prowler.lib.outputs.compliance.cis.cis import get_cis_table
@@ -12,6 +9,9 @@ from prowler.lib.outputs.compliance.compliance_check import (  # noqa: F401 - re
)
from prowler.lib.outputs.compliance.csa.csa import get_csa_table
from prowler.lib.outputs.compliance.ens.ens import get_ens_table
+from prowler.lib.outputs.compliance.essential_eight.essential_eight import (
+    get_essential_eight_table,
+)
from prowler.lib.outputs.compliance.generic.generic_table import (
    get_generic_compliance_table,
)
@@ -233,8 +233,8 @@ def display_compliance_table(
            output_directory,
            compliance_overview,
        )
-    elif "asd_essential_eight" in compliance_framework:
-        get_asd_essential_eight_table(
+    elif "essential_eight" in compliance_framework:
+        get_essential_eight_table(
            findings,
            bulk_checks_metadata,
            compliance_framework,
+10 -10
@@ -4,7 +4,7 @@ from tabulate import tabulate
from prowler.config.config import orange_color


-def get_asd_essential_eight_table(
+def get_essential_eight_table(
    findings: list,
    bulk_checks_metadata: dict,
    compliance_framework: str,
@@ -13,7 +13,7 @@ def get_asd_essential_eight_table(
    compliance_overview: bool,
):
    sections = {}
-    asd_essential_eight_compliance_table = {
+    essential_eight_compliance_table = {
        "Provider": [],
        "Section": [],
        "Status": [],
@@ -26,7 +26,7 @@ def get_asd_essential_eight_table(
        check = bulk_checks_metadata[finding.check_metadata.CheckID]
        check_compliances = check.Compliance
        for compliance in check_compliances:
-            if compliance.Framework == "ASD-Essential-Eight":
+            if compliance.Framework == "Essential-Eight":
                for requirement in compliance.Requirements:
                    for attribute in requirement.Attributes:
                        section = attribute.Section
@@ -50,19 +50,19 @@ def get_asd_essential_eight_table(

    sections = dict(sorted(sections.items()))
    for section in sections:
-        asd_essential_eight_compliance_table["Provider"].append(compliance.Provider)
-        asd_essential_eight_compliance_table["Section"].append(section)
+        essential_eight_compliance_table["Provider"].append(compliance.Provider)
+        essential_eight_compliance_table["Section"].append(section)
        if sections[section]["FAIL"] > 0:
-            asd_essential_eight_compliance_table["Status"].append(
+            essential_eight_compliance_table["Status"].append(
                f"{Fore.RED}FAIL({sections[section]['FAIL']}){Style.RESET_ALL}"
            )
        elif sections[section]["PASS"] > 0:
-            asd_essential_eight_compliance_table["Status"].append(
+            essential_eight_compliance_table["Status"].append(
                f"{Fore.GREEN}PASS({sections[section]['PASS']}){Style.RESET_ALL}"
            )
        else:
-            asd_essential_eight_compliance_table["Status"].append("-")
-        asd_essential_eight_compliance_table["Muted"].append(
+            essential_eight_compliance_table["Status"].append("-")
+        essential_eight_compliance_table["Muted"].append(
            f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}"
        )
        if len(fail_count) + len(pass_count) + len(muted_count) > 1:
@@ -84,7 +84,7 @@ def get_asd_essential_eight_table(
        )
        print(
            tabulate(
-                asd_essential_eight_compliance_table,
+                essential_eight_compliance_table,
                headers="keys",
                tablefmt="rounded_grid",
            )
+6 -6
@@ -1,13 +1,13 @@
from prowler.config.config import timestamp
from prowler.lib.check.compliance_models import Compliance
-from prowler.lib.outputs.compliance.asd_essential_eight.models import (
-    ASDEssentialEightAWSModel,
-)
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
+from prowler.lib.outputs.compliance.essential_eight.models import (
+    EssentialEightAWSModel,
+)
from prowler.lib.outputs.finding import Finding


-class ASDEssentialEightAWS(ComplianceOutput):
+class EssentialEightAWS(ComplianceOutput):
    """
    This class represents the AWS ASD Essential Eight compliance output.

@@ -41,7 +41,7 @@ class ASDEssentialEightAWS(ComplianceOutput):
        for requirement in compliance.Requirements:
            if requirement.Id in finding_requirements:
                for attribute in requirement.Attributes:
-                    compliance_row = ASDEssentialEightAWSModel(
+                    compliance_row = EssentialEightAWSModel(
                        Provider=finding.provider,
                        Description=compliance.Description,
                        AccountId=finding.account_uid,
@@ -77,7 +77,7 @@ class ASDEssentialEightAWS(ComplianceOutput):
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
-                    compliance_row = ASDEssentialEightAWSModel(
+                    compliance_row = EssentialEightAWSModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        AccountId="",
+2 -2
@@ -1,9 +1,9 @@
from pydantic.v1 import BaseModel


-class ASDEssentialEightAWSModel(BaseModel):
+class EssentialEightAWSModel(BaseModel):
    """
-    ASDEssentialEightAWSModel generates a finding's output in AWS ASD Essential Eight Compliance format.
+    EssentialEightAWSModel generates a finding's output in AWS ASD Essential Eight Compliance format.
    """

    Provider: str

@@ -187,11 +187,9 @@ class Finding(BaseModel):
            output_data["account_uid"] = (
                output_data["account_organization_uid"]
                if "Tenant:" in check_output.subscription
-                else check_output.subscription
-            )
-            output_data["account_name"] = provider.identity.subscriptions.get(
-                check_output.subscription, check_output.subscription
-            )
+                else provider.identity.subscriptions[check_output.subscription]
+            )
+            output_data["account_name"] = check_output.subscription
            output_data["resource_name"] = check_output.resource_name
            output_data["resource_uid"] = check_output.resource_id
            output_data["region"] = check_output.location
@@ -492,11 +492,8 @@ class HTML(Output):
        """
        try:
            printed_subscriptions = []
-            for (
-                subscription_id,
-                display_name,
-            ) in provider.identity.subscriptions.items():
-                intermediate = f"{display_name} : {subscription_id}"
+            for key, value in provider.identity.subscriptions.items():
+                intermediate = f"{key} : {value}"
                printed_subscriptions.append(intermediate)

            # check if identity is str(coming from SP) or dict(coming from browser or)

@@ -82,11 +82,8 @@ class Slack:
            logo = gcp_logo
        elif provider.type == "azure":
            printed_subscriptions = []
-            for (
-                subscription_id,
-                display_name,
-            ) in provider.identity.subscriptions.items():
-                intermediate = f"- *{subscription_id}: {display_name}*\n"
+            for key, value in provider.identity.subscriptions.items():
+                intermediate = f"- *{key}: {value}*\n"
                printed_subscriptions.append(intermediate)
            identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
            logo = azure_logo
@@ -185,13 +185,9 @@ def display_summary_table(
    print(
        f"\n{entity_type} {Fore.YELLOW}{audited_entities}{Style.RESET_ALL} Scan Results (severity columns are for fails only):"
    )
-    if provider.type == "azure":
-        scanned_subscriptions = ", ".join(
-            f"{display_name} ({subscription_id})"
-            for subscription_id, display_name in provider.identity.subscriptions.items()
-        )
+    if provider == "azure":
        print(
-            f"\nSubscriptions scanned: {Fore.YELLOW}{scanned_subscriptions}{Style.RESET_ALL}"
+            f"\nSubscriptions scanned: {Fore.YELLOW}{' '.join(provider.identity.subscriptions.keys())}{Style.RESET_ALL}"
        )
    print(tabulate(findings_table, headers="keys", tablefmt="rounded_grid"))
    print(
@@ -221,12 +221,27 @@ class CloudTrailTimeline(TimelineService):

    @staticmethod
    def _extract_actor(user_identity: Dict[str, Any]) -> str:
-        """Return a compact actor name from CloudTrail userIdentity.
-
-        For ARNs, returns the resource portion (everything after the last
-        `:`) — e.g. `user/alice`, `assumed-role/MyRole/session-name`,
-        `root`. The full ARN is preserved separately in `actor_uid`.
-        """
+        """Extract a human-readable actor name from CloudTrail userIdentity."""
        # Try ARN first - most reliable
        if arn := user_identity.get("arn"):
-            return arn.rsplit(":", 1)[-1]
-        return user_identity.get("invokedBy") or "Unknown"
+            if "/" in arn:
+                parts = arn.split("/")
+                # For assumed-role, return the role name (second-to-last part)
+                if "assumed-role" in arn and len(parts) >= 2:
+                    return parts[-2]
+                return parts[-1]
+            return arn.split(":")[-1]
+
+        # Fall back to userName
+        if username := user_identity.get("userName"):
+            return username
+
+        # Fall back to principalId
+        if principal_id := user_identity.get("principalId"):
+            return principal_id
+
+        # For service-invoked actions
+        if invoking_service := user_identity.get("invokedBy"):
+            return invoking_service
+
+        return "Unknown"
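The two `_extract_actor` variants above diverge mainly on assumed-role ARNs. A side-by-side sketch on one made-up input, showing what each strategy returns:

```python
arn = "arn:aws:sts::123456789012:assumed-role/MyRole/session-name"

# Removed variant: keep everything after the last ":".
print(arn.rsplit(":", 1)[-1])  # assumed-role/MyRole/session-name

# Restored variant: for assumed-role ARNs, keep the role name
# (the second-to-last "/" segment).
parts = arn.split("/")
if "assumed-role" in arn and len(parts) >= 2:
    print(parts[-2])           # MyRole
else:
    print(parts[-1])
```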
-39
@@ -1,39 +0,0 @@
-{
-  "Provider": "aws",
-  "CheckID": "bedrock_prompt_management_exists",
-  "CheckTitle": "Amazon Bedrock Prompt Management prompts exist in the region",
-  "CheckType": [
-    "Software and Configuration Checks/AWS Security Best Practices"
-  ],
-  "ServiceName": "bedrock",
-  "SubServiceName": "",
-  "ResourceIdTemplate": "",
-  "Severity": "low",
-  "ResourceType": "Other",
-  "ResourceGroup": "ai_ml",
-  "Description": "**Bedrock Prompt Management** enables centralized creation, versioning, and governance of prompts used with foundation models.\n\nThis region-level check verifies whether at least one managed prompt exists in each scanned region, used as an adoption signal for Prompt Management. The presence of a prompt does not by itself guarantee that every application prompt is managed.",
-  "Risk": "Without **Prompt Management**, prompts are scattered across applications with no central oversight, versioning, or auditability over instructions sent to foundation models, weakening governance and compliance posture.\n\nManaged prompts are a governance enabler; **prompt injection** defenses are provided by Bedrock **guardrails**, covered by separate checks.",
-  "RelatedUrl": "",
-  "AdditionalURLs": [
-    "https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management.html",
-    "https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management-create.html"
-  ],
-  "Remediation": {
-    "Code": {
-      "CLI": "aws bedrock-agent create-prompt --name example_prompt --default-variant default --variants '[{\"name\":\"default\",\"templateType\":\"TEXT\",\"templateConfiguration\":{\"text\":{\"text\":\"Your prompt template here.\"}}}]'",
-      "NativeIaC": "",
-      "Other": "1. Open the Amazon Bedrock console\n2. Navigate to Prompt Management\n3. Click Create prompt\n4. Provide a name and configure the prompt template (a prompt can contain at most one variant; additional variants are created via CreatePromptVersion)\n5. Save the prompt",
-      "Terraform": ""
-    },
-    "Recommendation": {
-      "Text": "Adopt **Bedrock Prompt Management** to centralize prompt definitions, enforce versioning, and maintain governance over model interactions.\n\nUse managed prompts with **guardrails** and apply **least privilege** access controls to restrict who can create or modify prompts.",
-      "Url": "https://hub.prowler.com/check/bedrock_prompt_management_exists"
-    }
-  },
-  "Categories": [
-    "gen-ai"
-  ],
-  "DependsOn": [],
-  "RelatedTo": [],
-  "Notes": "Results are generated per scanned region. Regions where `ListPrompts` cannot be queried are omitted from the findings."
-}
-54
@@ -1,54 +0,0 @@
-"""Check for region-level Bedrock Prompt Management adoption."""
-
-from prowler.lib.check.models import Check, Check_Report_AWS
-from prowler.providers.aws.services.bedrock.bedrock_agent_client import (
-    bedrock_agent_client,
-)
-
-
-class bedrock_prompt_management_exists(Check):
-    """Check whether Amazon Bedrock Prompt Management prompts exist in the region.
-
-    A region is reported only when ListPrompts succeeded for it; regions where
-    the API call failed (e.g. AccessDenied, unsupported region) are skipped at
-    the service layer and produce no finding.
-
-    - PASS: At least one managed prompt exists in the region (one finding per prompt).
-    - FAIL: No managed prompts exist in the region (one finding per region).
-    """
-
-    def execute(self) -> list[Check_Report_AWS]:
-        """Execute the Bedrock Prompt Management exists check.
-
-        Returns:
-            A list of reports containing the result of the check.
-        """
-        findings = []
-        for region in sorted(bedrock_agent_client.prompt_scanned_regions):
-            regional_prompts = sorted(
-                (
-                    prompt
-                    for prompt in bedrock_agent_client.prompts.values()
-                    if prompt.region == region
-                ),
-                key=lambda prompt: prompt.name,
-            )
-
-            if regional_prompts:
-                for prompt in regional_prompts:
-                    report = Check_Report_AWS(metadata=self.metadata(), resource=prompt)
-                    report.status = "PASS"
-                    report.status_extended = f"Bedrock Prompt Management prompt {prompt.name} exists in region {region}."
-                    findings.append(report)
-            else:
-                report = Check_Report_AWS(metadata=self.metadata(), resource={})
-                report.region = region
-                report.resource_id = "prompt-management"
-                report.resource_arn = f"arn:{bedrock_agent_client.audited_partition}:bedrock:{region}:{bedrock_agent_client.audited_account}:prompt-management"
-                report.status = "FAIL"
-                report.status_extended = (
-                    f"No Bedrock Prompt Management prompts exist in region {region}."
-                )
-                findings.append(report)
-
-        return findings
@@ -140,10 +140,7 @@ class BedrockAgent(AWSService):
        # Call AWSService's __init__
        super().__init__("bedrock-agent", provider)
        self.agents = {}
-        self.prompts = {}
-        self.prompt_scanned_regions: set = set()
        self.__threading_call__(self._list_agents)
-        self.__threading_call__(self._list_prompts)
        self.__threading_call__(self._list_tags_for_resource, self.agents.values())

    def _list_agents(self, regional_client):
@@ -170,32 +167,7 @@ class BedrockAgent(AWSService):
                f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

-    def _list_prompts(self, regional_client):
-        """List all prompts in a region.
-
-        Prompt Management is evaluated as a region-level adoption signal, so
-        prompt collection is intentionally not filtered by audit_resources.
-        """
-        logger.info("Bedrock Agent - Listing Prompts...")
-        try:
-            paginator = regional_client.get_paginator("list_prompts")
-            for page in paginator.paginate():
-                for prompt in page.get("promptSummaries", []):
-                    prompt_arn = prompt.get("arn", "")
-                    self.prompts[prompt_arn] = Prompt(
-                        id=prompt.get("id", ""),
-                        name=prompt.get("name", ""),
-                        arn=prompt_arn,
-                        region=regional_client.region,
-                    )
-            self.prompt_scanned_regions.add(regional_client.region)
-        except Exception as error:
-            logger.error(
-                f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-            )
-
    def _list_tags_for_resource(self, resource):
        """List tags for a Bedrock Agent resource."""
        logger.info("Bedrock Agent - Listing Tags for Resource...")
        try:
            agent_tags = (
@@ -218,12 +190,3 @@ class Agent(BaseModel):
    guardrail_id: Optional[str] = None
    region: str
    tags: Optional[list] = []
-
-
-class Prompt(BaseModel):
-    """Model representing a Bedrock Prompt Management prompt."""
-
-    id: str
-    name: str
-    arn: str
-    region: str
+1 -2
@@ -34,8 +34,7 @@
        }
    },
    "Categories": [
-        "secrets",
-        "ec2-imdsv1"
+        "secrets"
    ],
    "DependsOn": [],
    "RelatedTo": [],

+1 -2
@@ -36,8 +36,7 @@
    },
    "Categories": [
        "identity-access",
-        "secrets",
-        "ec2-imdsv1"
+        "secrets"
    ],
    "DependsOn": [],
    "RelatedTo": [],
@@ -441,8 +441,8 @@ class AzureProvider(Provider):
            None
        """
        printed_subscriptions = []
-        for subscription_id, display_name in self._identity.subscriptions.items():
-            intermediate = display_name + ": " + subscription_id
+        for key, value in self._identity.subscriptions.items():
+            intermediate = key + ": " + value
            printed_subscriptions.append(intermediate)
        report_lines = [
            f"Azure Tenant Domain: {Fore.YELLOW}{self._identity.tenant_domain}{Style.RESET_ALL} Azure Tenant ID: {Fore.YELLOW}{self._identity.tenant_ids[0]}{Style.RESET_ALL}",
@@ -969,30 +969,19 @@ class AzureProvider(Provider):
            )
            if not subscription_ids:
                logger.info("Scanning all the Azure subscriptions...")
-                # TODO: get tags or labels
-                # TODO: fill with AzureSubscription
-                subscription_pairs = [
-                    (subscription.display_name, subscription.subscription_id)
-                    for subscription in subscriptions_client.subscriptions.list()
-                ]
+                for subscription in subscriptions_client.subscriptions.list():
+                    # TODO: get tags or labels
+                    # TODO: fill with AzureSubscription
+                    identity.subscriptions.update(
+                        {subscription.display_name: subscription.subscription_id}
+                    )
            else:
                logger.info("Scanning the subscriptions passed as argument ...")
-                subscription_pairs = [
-                    (
-                        subscriptions_client.subscriptions.get(
-                            subscription_id=id
-                        ).display_name,
-                        id,
-                    )
-                    for id in subscription_ids
-                ]
-
-            # Key the subscriptions dict by subscription ID (which is
-            # guaranteed unique) and store the display name as the value.
-            # This avoids collisions when multiple subscriptions share
-            # the same display name.
-            for display_name, subscription_id in subscription_pairs:
-                identity.subscriptions[subscription_id] = display_name
+                for id in subscription_ids:
+                    subscription = subscriptions_client.subscriptions.get(
+                        subscription_id=id
+                    )
+                    identity.subscriptions.update({subscription.display_name: id})

            # If there are no subscriptions listed -> checks are not going to be run against any resource
            if not identity.subscriptions:
@@ -1028,28 +1017,28 @@ class AzureProvider(Provider):

        Returns:
            A dictionary containing the locations available for each subscription. The dictionary
-            has subscription IDs as keys and lists of location names as values.
+            has subscription display names as keys and lists of location names as values.

        Examples:
            >>> provider = AzureProvider(...)
            >>> provider.get_locations()
            {
-                'sub-id-1': ['eastus', 'eastus2', 'westus', 'westus2'],
-                'sub-id-2': ['eastus', 'eastus2', 'westus', 'westus2']
+                'Subscription 1': ['eastus', 'eastus2', 'westus', 'westus2'],
+                'Subscription 2': ['eastus', 'eastus2', 'westus', 'westus2']
            }
        """
        credentials = self.session
        subscription_client = SubscriptionClient(credentials)
        locations = {}

-        for subscription_id, display_name in self._identity.subscriptions.items():
-            locations[subscription_id] = []
+        for display_name, subscription_id in self._identity.subscriptions.items():
+            locations[display_name] = []

            # List locations for each subscription
            for location in subscription_client.subscriptions.list_locations(
                subscription_id
            ):
-                locations[subscription_id].append(location.name)
+                locations[display_name].append(location.name)

        return locations
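The keying change above is the crux of this revert: a dict keyed by display name silently collapses subscriptions that share a name, while keying by subscription ID keeps every entry. A minimal illustration with made-up IDs:

```python
pairs = [("Prod", "sub-id-1"), ("Prod", "sub-id-2")]  # duplicate display name

by_name = {name: sub_id for name, sub_id in pairs}
print(by_name)  # {'Prod': 'sub-id-2'} -- sub-id-1 vanishes and is never scanned

by_id = {sub_id: name for name, sub_id in pairs}
print(by_id)    # {'sub-id-1': 'Prod', 'sub-id-2': 'Prod'} -- both retained
```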
@@ -8,23 +8,17 @@ class AzureMutelist(Mutelist):
        self,
        finding: Check_Report_Azure,
        subscription_id: str,
-        subscription_name: str = "",
    ) -> bool:
-        account_names = [subscription_id]
-        for account_name in (subscription_name, finding.subscription):
-            if account_name and account_name not in account_names:
-                account_names.append(account_name)
-
-        tags = unroll_dict(unroll_tags(finding.resource_tags))
-
-        for account_name in account_names:
-            if self.is_muted(
-                account_name,
-                finding.check_metadata.CheckID,
-                finding.location,
-                finding.resource_name,
-                tags,
-            ):
-                return True
-
-        return False
+        return self.is_muted(
+            subscription_id,  # support Azure Subscription ID in mutelist
+            finding.check_metadata.CheckID,
+            finding.location,
+            finding.resource_name,
+            unroll_dict(unroll_tags(finding.resource_tags)),
+        ) or self.is_muted(
+            finding.subscription,  # support Azure Subscription Name in mutelist
+            finding.check_metadata.CheckID,
+            finding.location,
+            finding.resource_name,
+            unroll_dict(unroll_tags(finding.resource_tags)),
+        )
Some files were not shown because too many files have changed in this diff.