Compare commits


12 Commits

Author SHA1 Message Date
Prowler Bot 796f2e1976 fix(oci): fix identity clients (#10524)
Co-authored-by: rchotacode <32524742+rchotacode@users.noreply.github.com>
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2026-03-31 09:52:29 +02:00
Prowler Bot 5afc54e27a fix(oci): Add multi region filtering argument support (#10505)
Co-authored-by: rchotacode <32524742+rchotacode@users.noreply.github.com>
Co-authored-by: Ronan Chota <ronan.chota@saic.com>
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2026-03-30 08:50:05 +02:00
Prowler Bot 391e99d788 fix(oci): Fix service region support (#10504)
Co-authored-by: rchotacode <32524742+rchotacode@users.noreply.github.com>
Co-authored-by: Ronan Chota <ronan.chota@saic.com>
2026-03-30 08:23:19 +02:00
Prowler Bot 9299755722 fix: Prowler's changelog (#10476)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-03-26 07:36:52 +01:00
Prowler Bot cff5898690 fix(aws): set partition's region for global services (#10474)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2026-03-26 07:33:16 +01:00
Prowler Bot 2ad0a12293 fix(oci): false positive for kms key rotation check (#10465)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2026-03-25 11:12:31 +01:00
Prowler Bot 5bfe996e95 fix(oci): false positive for password policies (#10463)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2026-03-25 11:01:22 +01:00
Prowler Bot f4601fe61c fix(sdk): support renamed OCI IdP mapping events (#10447)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2026-03-24 13:24:15 +00:00
Prowler Bot 6996458cf4 chore(api): Bump version to v1.23.1 (#10442)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-03-24 14:05:59 +01:00
Prowler Bot bd23ad471c chore(release): Bump version to v5.22.1 (#10443)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-03-24 14:04:45 +01:00
Prowler Bot 04a77a1836 docs: Update version to v5.22.0 (#10444)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-03-24 14:04:03 +01:00
Prowler Bot 1a9b76047a chore(api): Update prowler dependency to v5.22 for release 5.22.0 (#10438)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-03-24 12:46:23 +01:00
1010 changed files with 23207 additions and 89260 deletions
+1 -4
@@ -78,9 +78,6 @@ TASK_RETRY_ATTEMPTS=5
# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_SCHEME=redis
VALKEY_USERNAME=
VALKEY_PASSWORD=
VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0
@@ -145,7 +142,7 @@ SENTRY_RELEASE=local
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.24.5
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.16.0
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
+3 -13
@@ -13,19 +13,11 @@ inputs:
poetry-version:
description: 'Poetry version to install'
required: false
default: '2.3.4'
default: '2.1.1'
install-dependencies:
description: 'Install Python dependencies with Poetry'
required: false
default: 'true'
update-lock:
description: 'Run `poetry lock` during setup. Only enable when a prior step mutates pyproject.toml (e.g. API `@master` VCS rewrite). Default: false.'
required: false
default: 'false'
enable-cache:
description: 'Whether to enable Poetry dependency caching via actions/setup-python'
required: false
default: 'true'
runs:
using: 'composite'
@@ -82,7 +74,7 @@ runs:
grep -A2 -B2 "resolved_reference" poetry.lock
- name: Update poetry.lock (prowler repo only)
if: github.repository == 'prowler-cloud/prowler' && inputs.update-lock == 'true'
if: github.repository == 'prowler-cloud/prowler'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: poetry lock
@@ -91,9 +83,7 @@ runs:
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ inputs.python-version }}
# Disable cache when callers skip dependency install: Poetry 2.3.4 creates
# the venv in a path setup-python can't hash, breaking the post-step save-cache.
cache: ${{ inputs.enable-cache == 'true' && 'poetry' || '' }}
cache: 'poetry'
cache-dependency-path: ${{ inputs.working-directory }}/poetry.lock
- name: Install Python dependencies
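(For context, a workflow step consuming this composite action would pass the inputs described above roughly as in the sketch below. The step name and values are illustrative, and whether the `update-lock`/`enable-cache` inputs exist depends on which side of this compare is checked out.)

    - name: Setup Python with Poetry
      uses: ./.github/actions/setup-python-poetry
      with:
        python-version: '3.12'
        working-directory: ./api
        install-dependencies: 'true'
        update-lock: 'true'    # run `poetry lock` only when a prior step mutates pyproject.toml
        enable-cache: 'false'  # skip Poetry caching when dependency install is skipped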
+1 -4
@@ -117,10 +117,7 @@ runs:
INPUTS_IMAGE_TAG: ${{ inputs.image-tag }}
- name: Comment scan results on PR
if: >-
inputs.create-pr-comment == 'true'
&& github.event_name == 'pull_request'
&& github.event.pull_request.head.repo.full_name == github.repository
if: inputs.create-pr-comment == 'true' && github.event_name == 'pull_request'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
IMAGE_NAME: ${{ inputs.image-name }}
-7
@@ -67,11 +67,6 @@ provider/googleworkspace:
- any-glob-to-any-file: "prowler/providers/googleworkspace/**"
- any-glob-to-any-file: "tests/providers/googleworkspace/**"
provider/vercel:
- changed-files:
- any-glob-to-any-file: "prowler/providers/vercel/**"
- any-glob-to-any-file: "tests/providers/vercel/**"
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"
@@ -107,8 +102,6 @@ mutelist:
- any-glob-to-any-file: "tests/providers/openstack/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/googleworkspace/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/googleworkspace/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/vercel/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/vercel/lib/mutelist/**"
integration/s3:
- changed-files:
-8
@@ -177,14 +177,6 @@ modules:
- tests/providers/llm/**
e2e: []
- name: sdk-vercel
match:
- prowler/providers/vercel/**
- prowler/compliance/vercel/**
tests:
- tests/providers/vercel/**
e2e: []
# ============================================
# SDK - Lib modules
# ============================================
-17
@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
permissions: {}
jobs:
detect-release-type:
runs-on: ubuntu-latest
@@ -29,11 +27,6 @@ jobs:
patch_version: ${{ steps.detect.outputs.patch_version }}
current_api_version: ${{ steps.get_api_version.outputs.current_api_version }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -86,11 +79,6 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -216,11 +204,6 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
+1 -14
@@ -17,8 +17,6 @@ concurrency:
env:
API_WORKING_DIR: ./api
permissions: {}
jobs:
api-code-quality:
runs-on: ubuntu-latest
@@ -34,16 +32,6 @@ jobs:
working-directory: ./api
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
pypi.org:443
files.pythonhosted.org:443
api.github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -52,7 +40,7 @@ jobs:
- name: Check for API changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
api/**
@@ -69,7 +57,6 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
update-lock: 'true'
- name: Poetry check
if: steps.check-changes.outputs.any_changed == 'true'
-14
@@ -24,8 +24,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
api-analyze:
name: CodeQL Security Analysis
@@ -43,18 +41,6 @@ jobs:
- 'python'
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
github.com:443
release-assets.githubusercontent.com:443
uploads.github.com:443
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
+10 -66
@@ -18,6 +18,9 @@ on:
required: true
type: string
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false
@@ -33,8 +36,6 @@ env:
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api
permissions: {}
jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
@@ -42,14 +43,7 @@ jobs:
timeout-minutes: 5
outputs:
short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
- name: Calculate short SHA
id: set-short-sha
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
@@ -61,14 +55,7 @@ jobs:
timeout-minutes: 5
outputs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -107,26 +94,6 @@ jobs:
packages: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
_http._tcp.deb.debian.org:443
aka.ms:443
auth.docker.io:443
cdn.powershellgallery.com:443
dc.services.visualstudio.com:443
debian.map.fastlydns.net:80
files.pythonhosted.org:443
github.com:443
powershellinfraartifacts-gkhedzdeaghdezhr.z01.azurefd.net:443
production.cloudflare.docker.com:443
pypi.org:443
registry-1.docker.io:443
release-assets.githubusercontent.com:443
www.powershellgallery.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -139,18 +106,18 @@ jobs:
sed -i "s|prowler-cloud/prowler.git@master|prowler-cloud/prowler.git@${LATEST_SHA}|" api/pyproject.toml
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build and push API container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
@@ -165,26 +132,17 @@ jobs:
needs: [setup, container-build-push]
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
release-assets.githubusercontent.com:443
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Create and push manifests for push event
if: github.event_name == 'push'
run: |
@@ -226,14 +184,7 @@ jobs:
needs: [setup, notify-release-started, container-build-push, create-manifest]
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -276,13 +227,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- name: Trigger API deployment
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
+4 -37
@@ -18,8 +18,6 @@ env:
API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api
permissions: {}
jobs:
api-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -29,13 +27,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -44,7 +35,7 @@ jobs:
- name: Check if Dockerfile changed
id: dockerfile-changed
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: api/Dockerfile
@@ -74,30 +65,6 @@ jobs:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
mirror.gcr.io:443
check.trivy.dev:443
github.com:443
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
debian.map.fastlydns.net:80
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443
pypi.org:443
files.pythonhosted.org:443
www.powershellgallery.com:443
aka.ms:443
cdn.powershellgallery.com:443
_http._tcp.deb.debian.org:443
powershellinfraartifacts-gkhedzdeaghdezhr.z01.azurefd.net:443
get.trivy.dev:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -106,7 +73,7 @@ jobs:
- name: Check for API changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: api/**
files_ignore: |
@@ -117,11 +84,11 @@ jobs:
- name: Set up Docker Buildx
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: ${{ env.API_WORKING_DIR }}
push: false
+2 -19
@@ -17,8 +17,6 @@ concurrency:
env:
API_WORKING_DIR: ./api
permissions: {}
jobs:
api-security-scans:
runs-on: ubuntu-latest
@@ -34,19 +32,6 @@ jobs:
working-directory: ./api
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
pypi.org:443
files.pythonhosted.org:443
github.com:443
auth.safetycli.com:443
pyup.io:443
data.safetycli.com:443
api.github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -55,7 +40,7 @@ jobs:
- name: Check for API changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
api/**
@@ -72,7 +57,6 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
update-lock: 'true'
- name: Bandit
if: steps.check-changes.outputs.any_changed == 'true'
@@ -80,10 +64,9 @@ jobs:
- name: Safety
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry run safety check --ignore 79023,79027,86217,71600
run: poetry run safety check --ignore 79023,79027,86217
# TODO: 79023 & 79027 knack ReDoS until `azure-cli-core` (via `cartography`) allows `knack` >=0.13.0
# TODO: 86217 because `alibabacloud-tea-openapi == 0.4.3` don't let us upgrade `cryptography >= 46.0.0`
# TODO: 71600 CVE-2024-1135 false positive - fixed in gunicorn 22.0.0, project uses 23.0.0
- name: Vulture
if: steps.check-changes.outputs.any_changed == 'true'
+1 -23
@@ -22,16 +22,11 @@ env:
POSTGRES_USER: prowler_user
POSTGRES_PASSWORD: prowler
POSTGRES_DB: postgres-db
VALKEY_SCHEME: redis
VALKEY_USERNAME: ""
VALKEY_PASSWORD: ""
VALKEY_HOST: localhost
VALKEY_PORT: 6379
VALKEY_DB: 0
API_WORKING_DIR: ./api
permissions: {}
jobs:
api-tests:
runs-on: ubuntu-latest
@@ -77,22 +72,6 @@ jobs:
--health-retries 5
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
pypi.org:443
files.pythonhosted.org:443
cli.codecov.io:443
keybase.io:443
ingest.codecov.io:443
storage.googleapis.com:443
o26192.ingest.us.sentry.io:443
api.github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -101,7 +80,7 @@ jobs:
- name: Check for API changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
api/**
@@ -118,7 +97,6 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
update-lock: 'true'
- name: Run tests with pytest
if: steps.check-changes.outputs.any_changed == 'true'
+1 -10
@@ -17,8 +17,6 @@ env:
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_IGNORE: was-backported
permissions: {}
jobs:
backport:
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
@@ -29,13 +27,6 @@ jobs:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- name: Check labels
id: label_check
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
@@ -48,7 +39,7 @@ jobs:
- name: Backport PR
if: steps.label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@9460b7102fea25466026ce806c9ebf873ac48721 # v11.0.0
uses: sorenlouv/backport-github-action@516854e7c9f962b9939085c9a92ea28411d1ae90 # v10.2.0
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
+1 -13
@@ -21,8 +21,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
zizmor:
if: github.repository == 'prowler-cloud/prowler'
@@ -35,22 +33,12 @@ jobs:
actions: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
ghcr.io:443
pkg-containers.githubusercontent.com:443
api.github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Run zizmor
uses: zizmorcore/zizmor-action@71321a20a9ded102f6e9ce5718a2fcec2c4f70d8 # v0.5.2
uses: zizmorcore/zizmor-action@0dce2577a4760a2749d8cfb7a84b7d5585ebcb7d # v0.5.0
with:
token: ${{ github.token }}
@@ -9,8 +9,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
cancel-in-progress: false
permissions: {}
jobs:
update-labels:
if: contains(github.event.issue.labels.*.name, 'status/awaiting-response')
@@ -21,11 +19,6 @@ jobs:
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Remove 'status/awaiting-response' label
env:
GH_TOKEN: ${{ github.token }}
@@ -16,8 +16,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
permissions: {}
jobs:
conventional-commit-check:
runs-on: ubuntu-latest
@@ -27,11 +25,6 @@ jobs:
pull-requests: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Check PR title format
uses: agenthunt/conventional-commit-checker-action@f1823f632e95a64547566dcd2c7da920e67117ad # v2.0.1
with:
@@ -13,8 +13,6 @@ env:
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_COLOR: B60205
permissions: {}
jobs:
create-label:
runs-on: ubuntu-latest
@@ -24,11 +22,6 @@ jobs:
issues: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Create backport label for minor releases
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+7 -64
@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
permissions: {}
jobs:
detect-release-type:
runs-on: ubuntu-latest
@@ -29,11 +27,6 @@ jobs:
patch_version: ${{ steps.detect.outputs.patch_version }}
current_docs_version: ${{ steps.get_docs_version.outputs.current_docs_version }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -86,11 +79,6 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -130,7 +118,7 @@ jobs:
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.BASE_BRANCH }}
base: master
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
@@ -216,10 +204,10 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
egress-policy: audit
persist-credentials: false
- name: Calculate next patch version
run: |
@@ -245,52 +233,7 @@ jobs:
NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_PATCH_VERSION: ${{ needs.detect-release-type.outputs.patch_version }}
NEEDS_DETECT_RELEASE_TYPE_OUTPUTS_CURRENT_DOCS_VERSION: ${{ needs.detect-release-type.outputs.current_docs_version }}
- name: Checkout master branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ env.BASE_BRANCH }}
persist-credentials: false
- name: Bump versions in documentation for master
run: |
set -e
# Update prowler-app.mdx with current release version
sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
echo "Files modified:"
git --no-pager diff
- name: Create PR for documentation update to master
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.BASE_BRANCH }}
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ env.VERSION_BRANCH }}
persist-credentials: false
- name: Bump versions in documentation for version branch
- name: Bump versions in documentation for patch version
run: |
set -e
@@ -308,13 +251,13 @@ jobs:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}-branch
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
-11
@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
scan-secrets:
runs-on: ubuntu-latest
@@ -24,15 +22,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
ghcr.io:443
pkg-containers.githubusercontent.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
+2 -9
@@ -21,8 +21,6 @@ concurrency:
env:
CHART_PATH: contrib/k8s/helm/prowler-app
permissions: {}
jobs:
helm-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -32,18 +30,13 @@ jobs:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Set up Helm
uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4.3.1
- name: Update chart dependencies
run: helm dependency update ${{ env.CHART_PATH }}
+2 -9
@@ -13,8 +13,6 @@ concurrency:
env:
CHART_PATH: contrib/k8s/helm/prowler-app
permissions: {}
jobs:
release-helm-chart:
if: github.repository == 'prowler-cloud/prowler'
@@ -25,18 +23,13 @@ jobs:
packages: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Set up Helm
uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
uses: azure/setup-helm@b9e51907a09c216f16ebe8536097933489208112 # v4.3.0
- name: Set appVersion from release tag
run: |
-53
@@ -1,53 +0,0 @@
name: 'Tools: Lock Issue on Close'
on:
issues:
types:
- closed
concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
cancel-in-progress: false
permissions: {}
jobs:
lock:
if: |
github.repository == 'prowler-cloud/prowler' &&
github.event.issue.locked == false
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
issues: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- name: Comment and lock issue
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { owner, repo } = context.repo;
const issue_number = context.payload.issue.number;
try {
await github.rest.issues.createComment({
owner,
repo,
issue_number,
body: 'This issue is now locked as it has been closed. If you are still hitting a related problem, please open a new issue and link back to this one for context. Thanks!'
});
} catch (error) {
core.warning(`Failed to post lock comment on issue #${issue_number}: ${error.message}`);
}
const lockParams = { owner, repo, issue_number };
if (context.payload.issue.state_reason === 'completed') {
lockParams.lock_reason = 'resolved';
}
await github.rest.issues.lock(lockParams);
+9 -39
@@ -65,11 +65,6 @@ jobs:
text: ${{ steps.compute-text.outputs.text }}
title: ${{ steps.compute-text.outputs.title }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Setup Scripts
uses: github/gh-aw/actions/setup@9382be3ca9ac18917e111a99d4e6bbff58d0dccc # v0.43.23
with:
@@ -134,11 +129,6 @@ jobs:
output_types: ${{ steps.collect_output.outputs.output_types }}
secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Setup Scripts
uses: github/gh-aw/actions/setup@9382be3ca9ac18917e111a99d4e6bbff58d0dccc # v0.43.23
with:
@@ -772,7 +762,7 @@ jobs:
SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload Safe Outputs
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: safe-output
path: ${{ env.GH_AW_SAFE_OUTPUTS }}
@@ -793,13 +783,13 @@ jobs:
await main();
- name: Upload sanitized agent output
if: always() && env.GH_AW_AGENT_OUTPUT
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: agent-output
path: ${{ env.GH_AW_AGENT_OUTPUT }}
if-no-files-found: warn
- name: Upload engine output files
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: agent_outputs
path: |
@@ -839,7 +829,7 @@ jobs:
- name: Upload agent artifacts
if: always()
continue-on-error: true
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: agent-artifacts
path: |
@@ -869,18 +859,13 @@ jobs:
tools_reported: ${{ steps.missing_tool.outputs.tools_reported }}
total_count: ${{ steps.missing_tool.outputs.total_count }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Setup Scripts
uses: github/gh-aw/actions/setup@9382be3ca9ac18917e111a99d4e6bbff58d0dccc # v0.43.23
with:
destination: /opt/gh-aw/actions
- name: Download agent output artifact
continue-on-error: true
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: agent-output
path: /tmp/gh-aw/safeoutputs/
@@ -981,24 +966,19 @@ jobs:
outputs:
success: ${{ steps.parse_results.outputs.success }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Setup Scripts
uses: github/gh-aw/actions/setup@9382be3ca9ac18917e111a99d4e6bbff58d0dccc # v0.43.23
with:
destination: /opt/gh-aw/actions
- name: Download agent artifacts
continue-on-error: true
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: agent-artifacts
path: /tmp/gh-aw/threat-detection/
- name: Download agent output artifact
continue-on-error: true
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: agent-output
path: /tmp/gh-aw/threat-detection/
@@ -1071,7 +1051,7 @@ jobs:
await main();
- name: Upload threat detection log
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: threat-detection.log
path: /tmp/gh-aw/threat-detection/detection.log
@@ -1090,11 +1070,6 @@ jobs:
outputs:
activated: ${{ (steps.check_membership.outputs.is_team_member == 'true') && (steps.check_rate_limit.outputs.rate_limit_ok == 'true') }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Setup Scripts
uses: github/gh-aw/actions/setup@9382be3ca9ac18917e111a99d4e6bbff58d0dccc # v0.43.23
with:
@@ -1163,18 +1138,13 @@ jobs:
process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }}
process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Setup Scripts
uses: github/gh-aw/actions/setup@9382be3ca9ac18917e111a99d4e6bbff58d0dccc # v0.43.23
with:
destination: /opt/gh-aw/actions
- name: Download agent output artifact
continue-on-error: true
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: agent-output
path: /tmp/gh-aw/safeoutputs/
+1 -14
@@ -15,8 +15,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
permissions: {}
jobs:
labeler:
runs-on: ubuntu-latest
@@ -26,11 +24,6 @@ jobs:
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Apply labels to PR
uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
with:
@@ -45,11 +38,6 @@ jobs:
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Check if author is org member
id: check_membership
env:
@@ -77,8 +65,7 @@ jobs:
"RosaRivasProwler"
"StylusFrost"
"toniblyx"
"davidm4r"
"pfe-nazaries"
"vicferpoy"
)
echo "Checking if $AUTHOR is a member of prowler-cloud organization"
+10 -61
@@ -17,6 +17,9 @@ on:
required: true
type: string
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false
@@ -32,8 +35,6 @@ env:
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-mcp
permissions: {}
jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
@@ -41,14 +42,7 @@ jobs:
timeout-minutes: 5
outputs:
short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
- name: Calculate short SHA
id: set-short-sha
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
@@ -60,14 +54,7 @@ jobs:
timeout-minutes: 5
outputs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -105,38 +92,24 @@ jobs:
contents: read
packages: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
ghcr.io:443
pkg-containers.githubusercontent.com:443
files.pythonhosted.org:443
pypi.org:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build and push MCP container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
@@ -159,27 +132,17 @@ jobs:
needs: [setup, container-build-push]
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
github.com:443
release-assets.githubusercontent.com:443
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Create and push manifests for push event
if: github.event_name == 'push'
run: |
@@ -221,14 +184,7 @@ jobs:
needs: [setup, notify-release-started, container-build-push, create-manifest]
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -271,13 +227,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- name: Trigger MCP deployment
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
+4 -33
@@ -18,8 +18,6 @@ env:
MCP_WORKING_DIR: ./mcp_server
IMAGE_NAME: prowler-mcp
permissions: {}
jobs:
mcp-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -29,13 +27,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -44,7 +35,7 @@ jobs:
- name: Check if Dockerfile changed
id: dockerfile-changed
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: mcp_server/Dockerfile
@@ -73,26 +64,6 @@ jobs:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
ghcr.io:443
pkg-containers.githubusercontent.com:443
files.pythonhosted.org:443
pypi.org:443
api.github.com:443
mirror.gcr.io:443
check.trivy.dev:443
get.trivy.dev:443
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -101,7 +72,7 @@ jobs:
- name: Check for MCP changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: mcp_server/**
files_ignore: |
@@ -110,11 +81,11 @@ jobs:
- name: Set up Docker Buildx
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build MCP container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: ${{ env.MCP_WORKING_DIR }}
push: false
+1 -13
@@ -14,8 +14,6 @@ env:
PYTHON_VERSION: "3.12"
WORKING_DIRECTORY: ./mcp_server
permissions: {}
jobs:
validate-release:
if: github.repository == 'prowler-cloud/prowler'
@@ -28,11 +26,6 @@ jobs:
major_version: ${{ steps.parse-version.outputs.major }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Parse and validate version
id: parse-version
run: |
@@ -66,18 +59,13 @@ jobs:
url: https://pypi.org/project/prowler-mcp/
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Install uv
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7.3.1
uses: astral-sh/setup-uv@5a095e7a2014a4212f075830d4f7277575a9d098 # v7
with:
enable-cache: false
+1 -11
@@ -16,8 +16,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
permissions: {}
jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
@@ -30,14 +28,6 @@ jobs:
MONITORED_FOLDERS: 'api ui prowler mcp_server'
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -47,7 +37,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
api/**
@@ -1,182 +0,0 @@
name: 'Tools: Check Compliance Mapping'
on:
pull_request:
types:
- 'opened'
- 'synchronize'
- 'reopened'
- 'labeled'
- 'unlabeled'
branches:
- 'master'
- 'v5.*'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
permissions: {}
jobs:
check-compliance-mapping:
if: contains(github.event.pull_request.labels.*.name, 'no-compliance-check') == false
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0
# zizmor: ignore[artipacked]
persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
with:
files: |
prowler/providers/**/services/**/*.metadata.json
prowler/compliance/**/*.json
- name: Check if new checks are mapped in compliance
id: compliance-check
run: |
ADDED_METADATA="${STEPS_CHANGED_FILES_OUTPUTS_ADDED_FILES}"
ALL_CHANGED="${STEPS_CHANGED_FILES_OUTPUTS_ALL_CHANGED_FILES}"
# Filter only new metadata files (new checks)
new_checks=""
for f in $ADDED_METADATA; do
case "$f" in *.metadata.json) new_checks="$new_checks $f" ;; esac
done
if [ -z "$(echo "$new_checks" | tr -d ' ')" ]; then
echo "No new checks detected."
echo "has_new_checks=false" >> "$GITHUB_OUTPUT"
exit 0
fi
# Collect compliance files changed in this PR
changed_compliance=""
for f in $ALL_CHANGED; do
case "$f" in prowler/compliance/*.json) changed_compliance="$changed_compliance $f" ;; esac
done
UNMAPPED=""
MAPPED=""
for metadata_file in $new_checks; do
check_dir=$(dirname "$metadata_file")
check_id=$(basename "$check_dir")
provider=$(echo "$metadata_file" | cut -d'/' -f3)
# Read CheckID from the metadata JSON for accuracy
if [ -f "$metadata_file" ]; then
json_check_id=$(python3 -c "import json; print(json.load(open('$metadata_file')).get('CheckID', ''))" 2>/dev/null || echo "")
if [ -n "$json_check_id" ]; then
check_id="$json_check_id"
fi
fi
# Search for the check ID in compliance files changed in this PR
found_in=""
for comp_file in $changed_compliance; do
if grep -q "\"${check_id}\"" "$comp_file" 2>/dev/null; then
found_in="${found_in}$(basename "$comp_file" .json), "
fi
done
if [ -n "$found_in" ]; then
found_in=$(echo "$found_in" | sed 's/, $//')
MAPPED="${MAPPED}- \`${check_id}\` (\`${provider}\`): ${found_in}"$'\n'
else
UNMAPPED="${UNMAPPED}- \`${check_id}\` (\`${provider}\`)"$'\n'
fi
done
echo "has_new_checks=true" >> "$GITHUB_OUTPUT"
if [ -n "$UNMAPPED" ]; then
echo "has_unmapped=true" >> "$GITHUB_OUTPUT"
else
echo "has_unmapped=false" >> "$GITHUB_OUTPUT"
fi
{
echo "unmapped<<EOF"
echo -e "${UNMAPPED}"
echo "EOF"
} >> "$GITHUB_OUTPUT"
{
echo "mapped<<EOF"
echo -e "${MAPPED}"
echo "EOF"
} >> "$GITHUB_OUTPUT"
env:
STEPS_CHANGED_FILES_OUTPUTS_ADDED_FILES: ${{ steps.changed-files.outputs.added_files }}
STEPS_CHANGED_FILES_OUTPUTS_ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
- name: Manage compliance review label
if: steps.compliance-check.outputs.has_new_checks == 'true'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
HAS_UNMAPPED: ${{ steps.compliance-check.outputs.has_unmapped }}
run: |
LABEL_NAME="needs-compliance-review"
if [ "$HAS_UNMAPPED" = "true" ]; then
echo "Adding compliance review label to PR #${PR_NUMBER}..."
gh pr edit "$PR_NUMBER" --add-label "$LABEL_NAME" --repo "${{ github.repository }}" || true
else
echo "Removing compliance review label from PR #${PR_NUMBER}..."
gh pr edit "$PR_NUMBER" --remove-label "$LABEL_NAME" --repo "${{ github.repository }}" || true
fi
- name: Find existing compliance comment
if: steps.compliance-check.outputs.has_new_checks == 'true' && github.event.pull_request.head.repo.full_name == github.repository
id: find-comment
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- compliance-mapping-check -->'
- name: Create or update compliance comment
if: steps.compliance-check.outputs.has_new_checks == 'true' && github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find-comment.outputs.comment-id }}
edit-mode: replace
body: |
<!-- compliance-mapping-check -->
## Compliance Mapping Review
This PR adds new checks. Please verify that they have been mapped to the relevant compliance framework requirements.
${{ steps.compliance-check.outputs.unmapped != '' && format('### New checks not mapped to any compliance framework in this PR
{0}
> Please review whether these checks should be added to compliance framework requirements in `prowler/compliance/<provider>/`. Each compliance JSON has a `Checks` array inside each requirement — add the check ID there if it satisfies that requirement.', steps.compliance-check.outputs.unmapped) || '' }}
${{ steps.compliance-check.outputs.mapped != '' && format('### New checks already mapped in this PR
{0}', steps.compliance-check.outputs.mapped) || '' }}
Use the `no-compliance-check` label to skip this check.
+1 -8
@@ -15,8 +15,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
permissions: {}
jobs:
check-conflicts:
runs-on: ubuntu-latest
@@ -27,11 +25,6 @@ jobs:
issues: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout PR head
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -41,7 +34,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: '**'
-9
@@ -12,8 +12,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: false
permissions: {}
jobs:
trigger-cloud-pull-request:
if: |
@@ -25,13 +23,6 @@ jobs:
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- name: Calculate short commit SHA
id: vars
run: |
+8 -12
@@ -17,8 +17,6 @@ concurrency:
env:
PROWLER_VERSION: ${{ inputs.prowler_version }}
permissions: {}
jobs:
prepare-release:
if: github.event_name == 'workflow_dispatch' && github.repository == 'prowler-cloud/prowler'
@@ -28,11 +26,6 @@ jobs:
contents: write
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -40,12 +33,15 @@ jobs:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
persist-credentials: false
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: '3.12'
install-dependencies: 'false'
enable-cache: 'false'
- name: Install Poetry
run: |
python3 -m pip install --user poetry==2.1.1
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: Configure Git
run: |
@@ -379,7 +375,7 @@ jobs:
no-changelog
- name: Create draft release
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
with:
tag_name: ${{ env.PROWLER_VERSION }}
name: Prowler ${{ env.PROWLER_VERSION }}
-17
@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
permissions: {}
jobs:
detect-release-type:
runs-on: ubuntu-latest
@@ -28,11 +26,6 @@ jobs:
minor_version: ${{ steps.detect.outputs.minor_version }}
patch_version: ${{ steps.detect.outputs.patch_version }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Detect release type and parse version
id: detect
run: |
@@ -73,11 +66,6 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -187,11 +175,6 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -10,8 +10,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
check-duplicate-test-names:
if: github.repository == 'prowler-cloud/prowler'
@@ -21,13 +19,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
+15 -14
@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
sdk-code-quality:
if: github.repository == 'prowler-cloud/prowler'
@@ -26,20 +24,12 @@ jobs:
strategy:
matrix:
python-version:
- '3.9'
- '3.10'
- '3.11'
- '3.12'
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
pypi.org:443
files.pythonhosted.org:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -48,7 +38,7 @@ jobs:
- name: Check for SDK changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: ./**
files_ignore: |
@@ -71,11 +61,22 @@ jobs:
contrib/**
**/AGENTS.md
- name: Setup Python with Poetry
- name: Install Poetry
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/setup-python-poetry
run: pipx install poetry==2.1.1
- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'
- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry run pip list
- name: Check Poetry lock file
if: steps.check-changes.outputs.any_changed == 'true'
-12
@@ -30,8 +30,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
sdk-analyze:
if: github.repository == 'prowler-cloud/prowler'
@@ -50,16 +48,6 @@ jobs:
- 'python'
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
github.com:443
release-assets.githubusercontent.com:443
uploads.github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
+23 -117
@@ -23,6 +23,9 @@ on:
required: true
type: string
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false
@@ -42,13 +45,10 @@ env:
# Container registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler
TONIBLYX_DOCKERHUB_REPOSITORY: toniblyx
# AWS configuration (for ECR)
AWS_REGION: us-east-1
permissions: {}
jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
@@ -59,32 +59,21 @@ jobs:
prowler_version_major: ${{ steps.get-prowler-version.outputs.prowler_version_major }}
latest_tag: ${{ steps.get-prowler-version.outputs.latest_tag }}
stable_tag: ${{ steps.get-prowler-version.outputs.stable_tag }}
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
pypi.org:443
files.pythonhosted.org:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'
enable-cache: 'false'
- name: Inject poetry-bumpversion plugin
run: pipx inject poetry poetry-bumpversion
- name: Install Poetry
run: |
pipx install poetry==2.1.1
pipx inject poetry poetry-bumpversion
- name: Get Prowler version and set tags
id: get-prowler-version
@@ -126,14 +115,7 @@ jobs:
timeout-minutes: 5
outputs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -172,40 +154,19 @@ jobs:
packages: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.ecr-public.us-east-1.amazonaws.com:443
public.ecr.aws:443
registry-1.docker.io:443
production.cloudflare.docker.com:443
auth.docker.io:443
debian.map.fastlydns.net:80
github.com:443
release-assets.githubusercontent.com:443
pypi.org:443
files.pythonhosted.org:443
www.powershellgallery.com:443
aka.ms:443
cdn.powershellgallery.com:443
_http._tcp.deb.debian.org:443
powershellinfraartifacts-gkhedzdeaghdezhr.z01.azurefd.net:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Public ECR
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: public.ecr.aws
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
@@ -214,12 +175,12 @@ jobs:
AWS_REGION: ${{ env.AWS_REGION }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build and push SDK container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: .
file: ${{ env.DOCKERFILE_PATH }}
@@ -235,32 +196,16 @@ jobs:
needs: [setup, container-build-push]
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
registry-1.docker.io:443
auth.docker.io:443
public.ecr.aws:443
production.cloudflare.docker.com:443
github.com:443
release-assets.githubusercontent.com:443
api.ecr-public.us-east-1.amazonaws.com:443
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Public ECR
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: public.ecr.aws
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
@@ -268,11 +213,15 @@ jobs:
env:
AWS_REGION: ${{ env.AWS_REGION }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Create and push manifests for push event
if: github.event_name == 'push'
run: |
docker buildx imagetools create \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_LATEST_TAG} \
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_LATEST_TAG} \
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_LATEST_TAG} \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_LATEST_TAG}-amd64 \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_LATEST_TAG}-arm64
@@ -283,10 +232,12 @@ jobs:
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
run: |
docker buildx imagetools create \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_PROWLER_VERSION} \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_STABLE_TAG} \
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${NEEDS_SETUP_OUTPUTS_PROWLER_VERSION} \
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${NEEDS_SETUP_OUTPUTS_STABLE_TAG} \
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${NEEDS_SETUP_OUTPUTS_PROWLER_VERSION} \
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${NEEDS_SETUP_OUTPUTS_STABLE_TAG} \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_PROWLER_VERSION} \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_STABLE_TAG} \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_LATEST_TAG}-amd64 \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_LATEST_TAG}-arm64
env:
@@ -294,39 +245,6 @@ jobs:
NEEDS_SETUP_OUTPUTS_STABLE_TAG: ${{ needs.setup.outputs.stable_tag }}
NEEDS_SETUP_OUTPUTS_LATEST_TAG: ${{ needs.setup.outputs.latest_tag }}
# Push to toniblyx/prowler only for current version (latest/stable/release tags)
- name: Login to DockerHub (toniblyx)
if: needs.setup.outputs.latest_tag == 'latest'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.TONIBLYX_DOCKERHUB_USERNAME }}
password: ${{ secrets.TONIBLYX_DOCKERHUB_PASSWORD }}
- name: Push manifests to toniblyx for push event
if: needs.setup.outputs.latest_tag == 'latest' && github.event_name == 'push'
run: |
docker buildx imagetools create \
-t ${{ env.TONIBLYX_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:latest \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:latest
- name: Push manifests to toniblyx for release event
if: needs.setup.outputs.latest_tag == 'latest' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
run: |
docker buildx imagetools create \
-t ${{ env.TONIBLYX_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${NEEDS_SETUP_OUTPUTS_PROWLER_VERSION} \
-t ${{ env.TONIBLYX_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:stable \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:stable
env:
NEEDS_SETUP_OUTPUTS_PROWLER_VERSION: ${{ needs.setup.outputs.prowler_version }}
# Re-login as prowlercloud for cleanup of intermediate tags
- name: Login to DockerHub (prowlercloud)
if: always()
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Install regctl
if: always()
uses: regclient/actions/regctl-installer@da9319db8e44e8b062b3a147e1dfb2f574d41a03 # main
@@ -346,14 +264,7 @@ jobs:
needs: [setup, notify-release-started, container-build-push, create-manifest]
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -396,11 +307,6 @@ jobs:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Calculate short SHA
id: short-sha
run: echo "short_sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
+4 -37
@@ -17,8 +17,6 @@ concurrency:
env:
IMAGE_NAME: prowler
permissions: {}
jobs:
sdk-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -28,13 +26,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -43,7 +34,7 @@ jobs:
- name: Check if Dockerfile changed
id: dockerfile-changed
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: Dockerfile
@@ -73,30 +64,6 @@ jobs:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
api.github.com:443
mirror.gcr.io:443
check.trivy.dev:443
debian.map.fastlydns.net:80
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443
pypi.org:443
files.pythonhosted.org:443
www.powershellgallery.com:443
aka.ms:443
cdn.powershellgallery.com:443
_http._tcp.deb.debian.org:443
powershellinfraartifacts-gkhedzdeaghdezhr.z01.azurefd.net:443
get.trivy.dev:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -105,7 +72,7 @@ jobs:
- name: Check for SDK changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: ./**
files_ignore: |
@@ -130,11 +97,11 @@ jobs:
- name: Set up Docker Buildx
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build SDK container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: .
push: false
+10 -25
View File
@@ -13,8 +13,6 @@ env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: '3.12'
permissions: {}
jobs:
validate-release:
if: github.repository == 'prowler-cloud/prowler'
@@ -27,11 +25,6 @@ jobs:
major_version: ${{ steps.parse-version.outputs.major }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Parse and validate version
id: parse-version
run: |
@@ -65,22 +58,18 @@ jobs:
url: https://pypi.org/project/prowler/${{ needs.validate-release.outputs.prowler_version }}/
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Install Poetry
run: pipx install poetry==2.1.1
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'
enable-cache: 'false'
- name: Build Prowler package
run: poetry build
@@ -102,22 +91,18 @@ jobs:
url: https://pypi.org/project/prowler-cloud/${{ needs.validate-release.outputs.prowler_version }}/
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
- name: Install Poetry
run: pipx install poetry==2.1.1
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.PYTHON_VERSION }}
install-dependencies: 'false'
enable-cache: 'false'
- name: Install toml package
run: pip install toml
@@ -13,8 +13,6 @@ env:
PYTHON_VERSION: '3.12'
AWS_REGION: 'us-east-1'
permissions: {}
jobs:
refresh-aws-regions:
if: github.repository == 'prowler-cloud/prowler'
@@ -26,11 +24,6 @@ jobs:
contents: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -12,8 +12,6 @@ concurrency:
env:
PYTHON_VERSION: '3.12'
permissions: {}
jobs:
refresh-oci-regions:
if: github.repository == 'prowler-cloud/prowler'
@@ -24,11 +22,6 @@ jobs:
contents: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
+12 -18
@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
sdk-security-scans:
if: github.repository == 'prowler-cloud/prowler'
@@ -25,19 +23,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
pypi.org:443
files.pythonhosted.org:443
github.com:443
auth.safetycli.com:443
pyup.io:443
data.safetycli.com:443
api.github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -46,7 +31,7 @@ jobs:
- name: Check for SDK changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files:
./**
@@ -71,11 +56,20 @@ jobs:
contrib/**
**/AGENTS.md
- name: Setup Python with Poetry
- name: Install Poetry
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/setup-python-poetry
run: pipx install poetry==2.1.1
- name: Set up Python 3.12
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: '3.12'
cache: 'poetry'
- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry install --no-root
- name: Security scan with Bandit
if: steps.check-changes.outputs.any_changed == 'true'
+29 -75
@@ -14,8 +14,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
sdk-tests:
if: github.repository == 'prowler-cloud/prowler'
@@ -26,41 +24,12 @@ jobs:
strategy:
matrix:
python-version:
- '3.9'
- '3.10'
- '3.11'
- '3.12'
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
pypi.org:443
files.pythonhosted.org:443
api.github.com:443
release-assets.githubusercontent.com:443
*.amazonaws.com:443
*.googleapis.com:443
schema.ocsf.io:443
registry-1.docker.io:443
production.cloudflare.docker.com:443
powershellinfraartifacts-gkhedzdeaghdezhr.z01.azurefd.net:443
o26192.ingest.us.sentry.io:443
management.azure.com:443
login.microsoftonline.com:443
keybase.io:443
ingest.codecov.io:443
graph.microsoft.com:443
dc.services.visualstudio.com:443
cloud.mongodb.com:443
cli.codecov.io:443
auth.docker.io:443
api.vercel.com:443
api.atlassian.com:443
aka.ms:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -69,7 +38,7 @@ jobs:
- name: Check for SDK changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: ./**
files_ignore: |
@@ -92,17 +61,26 @@ jobs:
contrib/**
**/AGENTS.md
- name: Setup Python with Poetry
- name: Install Poetry
if: steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/setup-python-poetry
run: pipx install poetry==2.1.1
- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'
- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: poetry install --no-root
# AWS Provider
- name: Check if AWS files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-aws
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/aws/**
@@ -209,11 +187,11 @@ jobs:
echo "AWS service_paths='${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}'"
if [ "${STEPS_AWS_SERVICES_OUTPUTS_RUN_ALL}" = "true" ]; then
poetry run pytest -p no:randomly -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
elif [ -z "${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}" ]; then
echo "No AWS service paths detected; skipping AWS tests."
else
poetry run pytest -p no:randomly -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml ${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml ${STEPS_AWS_SERVICES_OUTPUTS_SERVICE_PATHS}
fi
env:
STEPS_AWS_SERVICES_OUTPUTS_RUN_ALL: ${{ steps.aws-services.outputs.run_all }}
@@ -232,7 +210,7 @@ jobs:
- name: Check if Azure files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-azure
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/azure/**
@@ -256,7 +234,7 @@ jobs:
- name: Check if GCP files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-gcp
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/gcp/**
@@ -280,7 +258,7 @@ jobs:
- name: Check if Kubernetes files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-kubernetes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/kubernetes/**
@@ -304,7 +282,7 @@ jobs:
- name: Check if GitHub files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-github
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/github/**
@@ -328,7 +306,7 @@ jobs:
- name: Check if NHN files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-nhn
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/nhn/**
@@ -352,7 +330,7 @@ jobs:
- name: Check if M365 files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-m365
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/m365/**
@@ -376,7 +354,7 @@ jobs:
- name: Check if IaC files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-iac
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/iac/**
@@ -400,7 +378,7 @@ jobs:
- name: Check if MongoDB Atlas files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-mongodbatlas
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/mongodbatlas/**
@@ -424,7 +402,7 @@ jobs:
- name: Check if OCI files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-oraclecloud
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/oraclecloud/**
@@ -448,7 +426,7 @@ jobs:
- name: Check if OpenStack files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-openstack
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/openstack/**
@@ -472,7 +450,7 @@ jobs:
- name: Check if Google Workspace files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-googleworkspace
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/**/googleworkspace/**
@@ -492,35 +470,11 @@ jobs:
flags: prowler-py${{ matrix.python-version }}-googleworkspace
files: ./googleworkspace_coverage.xml
# Vercel Provider
- name: Check if Vercel files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-vercel
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
with:
files: |
./prowler/**/vercel/**
./tests/**/vercel/**
./poetry.lock
- name: Run Vercel tests
if: steps.changed-vercel.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/vercel --cov-report=xml:vercel_coverage.xml tests/providers/vercel
- name: Upload Vercel coverage to Codecov
if: steps.changed-vercel.outputs.any_changed == 'true'
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-vercel
files: ./vercel_coverage.xml
# Lib
- name: Check if Lib files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-lib
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/lib/**
@@ -544,7 +498,7 @@ jobs:
- name: Check if Config files changed
if: steps.check-changes.outputs.any_changed == 'true'
id: changed-config
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
./prowler/config/**
+1 -14
@@ -31,8 +31,6 @@ on:
description: "Whether there are UI E2E tests to run"
value: ${{ jobs.analyze.outputs.has-ui-e2e }}
permissions: {}
jobs:
analyze:
runs-on: ubuntu-latest
@@ -47,19 +45,8 @@ jobs:
has-sdk-tests: ${{ steps.set-flags.outputs.has-sdk-tests }}
has-api-tests: ${{ steps.set-flags.outputs.has-api-tests }}
has-ui-e2e: ${{ steps.set-flags.outputs.has-ui-e2e }}
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
pypi.org:443
files.pythonhosted.org:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -68,7 +55,7 @@ jobs:
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
- name: Setup Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
+3 -20
@@ -13,8 +13,6 @@ env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
permissions: {}
jobs:
detect-release-type:
runs-on: ubuntu-latest
@@ -28,11 +26,6 @@ jobs:
minor_version: ${{ steps.detect.outputs.minor_version }}
patch_version: ${{ steps.detect.outputs.patch_version }}
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Detect release type and parse version
id: detect
run: |
@@ -73,11 +66,6 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -101,7 +89,7 @@ jobs:
run: |
set -e
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=.*|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
@@ -155,7 +143,7 @@ jobs:
run: |
set -e
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=.*|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
@@ -191,11 +179,6 @@ jobs:
contents: read
pull-requests: write
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -225,7 +208,7 @@ jobs:
run: |
set -e
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=.*|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
-12
@@ -26,8 +26,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
ui-analyze:
if: github.repository == 'prowler-cloud/prowler'
@@ -46,16 +44,6 @@ jobs:
- 'javascript-typescript'
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
github.com:443
release-assets.githubusercontent.com:443
uploads.github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
+10 -61
@@ -17,6 +17,9 @@ on:
required: true
type: string
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false
@@ -35,8 +38,6 @@ env:
# Build args
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
permissions: {}
jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
@@ -44,14 +45,7 @@ jobs:
timeout-minutes: 5
outputs:
short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Calculate short SHA
id: set-short-sha
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
@@ -63,14 +57,7 @@ jobs:
timeout-minutes: 5
outputs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -109,38 +96,24 @@ jobs:
packages: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
registry-1.docker.io:443
production.cloudflare.docker.com:443
auth.docker.io:443
registry.npmjs.org:443
dl-cdn.alpinelinux.org:443
fonts.googleapis.com:443
fonts.gstatic.com:443
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build and push UI container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
@@ -158,27 +131,17 @@ jobs:
needs: [setup, container-build-push]
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
release-assets.githubusercontent.com:443
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
- name: Login to DockerHub
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Create and push manifests for push event
if: github.event_name == 'push'
run: |
@@ -220,14 +183,7 @@ jobs:
needs: [setup, notify-release-started, container-build-push, create-manifest]
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -270,13 +226,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
api.github.com:443
- name: Trigger UI deployment
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
+4 -33
@@ -18,8 +18,6 @@ env:
UI_WORKING_DIR: ./ui
IMAGE_NAME: prowler-ui
permissions: {}
jobs:
ui-dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
@@ -29,13 +27,6 @@ jobs:
contents: read
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -44,7 +35,7 @@ jobs:
- name: Check if Dockerfile changed
id: dockerfile-changed
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: ui/Dockerfile
@@ -74,26 +65,6 @@ jobs:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
registry-1.docker.io:443
auth.docker.io:443
production.cloudflare.docker.com:443
registry.npmjs.org:443
dl-cdn.alpinelinux.org:443
fonts.googleapis.com:443
fonts.gstatic.com:443
api.github.com:443
mirror.gcr.io:443
check.trivy.dev:443
get.trivy.dev:443
release-assets.githubusercontent.com:443
objects.githubusercontent.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -102,7 +73,7 @@ jobs:
- name: Check for UI changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: ui/**
files_ignore: |
@@ -112,11 +83,11 @@ jobs:
- name: Set up Docker Buildx
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Build UI container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: ${{ env.UI_WORKING_DIR }}
target: prod
+8 -23
@@ -15,14 +15,13 @@ on:
- 'ui/**'
- 'api/**' # API changes can affect UI E2E
permissions: {}
permissions:
contents: read
jobs:
# First, analyze which tests need to run
impact-analysis:
if: github.repository == 'prowler-cloud/prowler'
permissions:
contents: read
uses: ./.github/workflows/test-impact-analysis.yml
# Run E2E tests based on impact analysis
@@ -76,15 +75,8 @@ jobs:
# Pass E2E paths from impact analysis
E2E_TEST_PATHS: ${{ needs.impact-analysis.outputs.ui-e2e }}
RUN_ALL_TESTS: ${{ needs.impact-analysis.outputs.run-all }}
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -160,21 +152,21 @@ jobs:
'
- name: Setup Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version: '24.13.0'
- name: Setup pnpm
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4
with:
package_json_file: ui/package.json
version: 10
run_install: false
- name: Get pnpm store directory
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Setup pnpm and Next.js cache
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: |
${{ env.STORE_PATH }}
@@ -194,7 +186,7 @@ jobs:
run: pnpm run build
- name: Cache Playwright browsers
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
id: playwright-cache
with:
path: ~/.cache/ms-playwright
@@ -261,7 +253,7 @@ jobs:
fi
- name: Upload test reports
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: failure()
with:
name: playwright-report
@@ -281,14 +273,7 @@ jobs:
needs.impact-analysis.outputs.has-ui-e2e != 'true' &&
needs.impact-analysis.outputs.run-all != 'true'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: audit
- name: No E2E tests needed
run: |
echo "## E2E Tests Skipped" >> $GITHUB_STEP_SUMMARY
+7 -21
@@ -18,8 +18,6 @@ env:
UI_WORKING_DIR: ./ui
NODE_VERSION: '24.13.0'
permissions: {}
jobs:
ui-tests:
runs-on: ubuntu-latest
@@ -31,18 +29,6 @@ jobs:
working-directory: ./ui
steps:
- name: Harden Runner
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
egress-policy: block
allowed-endpoints: >
github.com:443
registry.npmjs.org:443
fonts.googleapis.com:443
fonts.gstatic.com:443
api.github.com:443
release-assets.githubusercontent.com:443
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
@@ -51,7 +37,7 @@ jobs:
- name: Check for UI changes
id: check-changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
ui/**
@@ -64,7 +50,7 @@ jobs:
- name: Get changed source files for targeted tests
id: changed-source
if: steps.check-changes.outputs.any_changed == 'true'
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
ui/**/*.ts
@@ -80,7 +66,7 @@ jobs:
- name: Check for critical path changes (run all tests)
id: critical-changes
if: steps.check-changes.outputs.any_changed == 'true'
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
with:
files: |
ui/lib/**
@@ -92,15 +78,15 @@ jobs:
- name: Setup Node.js ${{ env.NODE_VERSION }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version: ${{ env.NODE_VERSION }}
- name: Setup pnpm
if: steps.check-changes.outputs.any_changed == 'true'
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4
with:
package_json_file: ui/package.json
version: 10
run_install: false
- name: Get pnpm store directory
@@ -110,7 +96,7 @@ jobs:
- name: Setup pnpm and Next.js cache
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: |
${{ env.STORE_PATH }}
-25
@@ -1,25 +0,0 @@
rules:
secrets-outside-env:
ignore:
- api-bump-version.yml
- api-container-build-push.yml
- api-tests.yml
- backport.yml
- docs-bump-version.yml
- issue-triage.lock.yml
- mcp-container-build-push.yml
- pr-merged.yml
- prepare-release.yml
- sdk-bump-version.yml
- sdk-container-build-push.yml
- sdk-refresh-aws-services-regions.yml
- sdk-refresh-oci-regions.yml
- sdk-tests.yml
- ui-bump-version.yml
- ui-container-build-push.yml
- ui-e2e-tests-v2.yml
superfluous-actions:
ignore:
- pr-check-changelog.yml
- pr-conflict-checker.yml
- prepare-release.yml
-2
@@ -60,7 +60,6 @@ htmlcov/
**/mcp-config.json
**/mcpServers.json
.mcp/
.mcp.json
# AI Coding Assistants - Cursor
.cursorignore
@@ -84,7 +83,6 @@ continue.json
.continuerc.json
# AI Coding Assistants - OpenCode
.opencode/
opencode.json
# AI Coding Assistants - GitHub Copilot
+2 -3
@@ -70,7 +70,7 @@ repos:
args: ["--ignore=E266,W503,E203,E501,W605"]
- repo: https://github.com/python-poetry/poetry
rev: 2.3.4
rev: 2.1.1
hooks:
- id: poetry-check
name: API - poetry-check
@@ -128,8 +128,7 @@ repos:
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
# TODO: 79023 & 79027 knack ReDoS until `azure-cli-core` (via `cartography`) allows `knack` >=0.13.0
# TODO: 86217 because `alibabacloud-tea-openapi == 0.4.3` don't let us upgrade `cryptography >= 46.0.0`
# TODO: 71600 CVE-2024-1135 false positive - fixed in gunicorn 22.0.0, project uses 23.0.0
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745,79023,79027,86217,71600'
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745,79023,79027,86217'
language: system
- id: vulture
+1 -1
@@ -13,7 +13,7 @@ build:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- python -m pip install poetry==2.3.4
- python -m pip install poetry
post_install:
# Install dependencies with 'docs' dependency group
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
+3 -3
@@ -140,7 +140,7 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,
| Component | Location | Tech Stack |
|-----------|----------|------------|
| SDK | `prowler/` | Python 3.10+, Poetry 2.3+ |
| SDK | `prowler/` | Python 3.9+, Poetry |
| API | `api/` | Django 5.1, DRF, Celery |
| UI | `ui/` | Next.js 15, React 19, Tailwind 4 |
| MCP Server | `mcp_server/` | FastMCP, Python 3.12+ |
@@ -153,12 +153,12 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,
```bash
# Setup
poetry install --with dev
poetry run prek install
poetry run pre-commit install
# Code quality
poetry run make lint
poetry run make format
poetry run prek run --all-files
poetry run pre-commit run --all-files
```
---
+2 -2
@@ -1,4 +1,4 @@
FROM python:3.12.11-slim-bookworm@sha256:519591d6871b7bc437060736b9f7456b8731f1499a57e22e6c285135ae657bf7 AS build
FROM python:3.12.11-slim-bookworm AS build
LABEL maintainer="https://github.com/prowler-cloud/prowler"
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
@@ -68,7 +68,7 @@ ENV HOME='/home/prowler'
ENV PATH="${HOME}/.local/bin:${PATH}"
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry==2.3.4
pip install --no-cache-dir poetry
RUN poetry install --compile && \
rm -rf ~/.cache/pip
+5 -17
@@ -3,7 +3,7 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler</b> is the Open Cloud Security Platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
<b><i>Prowler</b> is the Open Cloud Security platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
</p>
<p align="center">
<b>Secure ANY cloud at AI Speed at <a href="https://prowler.com">prowler.com</i></b>
@@ -41,7 +41,7 @@
# Description
**Prowler** is the world's most widely used _Open-Source Cloud Security Platform_ that automates security and compliance across **any cloud environment**. With hundreds of ready-to-use security checks, remediation guidance, and compliance frameworks, Prowler is built to _“Secure ANY Cloud at AI Speed”_. Prowler delivers **AI-driven**, **customizable**, and **easy-to-use** assessments, dashboards, reports, and integrations, making cloud security **simple**, **scalable**, and **cost-effective** for organizations of any size.
**Prowler** is the world's most widely used _open-source cloud security platform_ that automates security and compliance across **any cloud environment**. With hundreds of ready-to-use security checks, remediation guidance, and compliance frameworks, Prowler is built to _“Secure ANY cloud at AI Speed”_. Prowler delivers **AI-driven**, **customizable**, and **easy-to-use** assessments, dashboards, reports, and integrations, making cloud security **simple**, **scalable**, and **cost-effective** for organizations of any size.
Prowler includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:
@@ -119,7 +119,6 @@ Every AWS provider scan will enqueue an Attack Paths ingestion job automatically
| Image | N/A | N/A | N/A | N/A | Official | CLI, API |
| Google Workspace | 1 | 1 | 0 | 1 | Official | CLI |
| OpenStack | 27 | 4 | 0 | 8 | Official | UI, API, CLI |
| Vercel | 30 | 6 | 0 | 5 | Official | CLI |
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |
> [!Note]
@@ -240,17 +239,9 @@ pnpm start
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
**Pre-commit Hooks Setup**
Some pre-commit hooks require tools installed on your system:
1. **Install [TruffleHog](https://github.com/trufflesecurity/trufflehog#install)** (secret scanning) — see the [official installation options](https://github.com/trufflesecurity/trufflehog#install).
2. **Install [Hadolint](https://github.com/hadolint/hadolint#install)** (Dockerfile linting) — see the [official installation options](https://github.com/hadolint/hadolint#install).
## Prowler CLI
### Pip package
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/). Consequently, it can be installed using pip with Python >=3.10, <3.13:
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/). Consequently, it can be installed using pip with Python >3.9.1, <3.13:
```console
pip install prowler
@@ -282,7 +273,7 @@ The container images are available here:
### From GitHub
Python >=3.10, <3.13 is required with pip and Poetry:
Python >3.9.1, <3.13 is required with pip and Poetry:
``` console
git clone https://github.com/prowler-cloud/prowler
@@ -310,10 +301,7 @@ python prowler-cli.py -v
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
- **Prowler MCP Server**: A Model Context Protocol server that provides AI tools for Lighthouse, the AI-powered security assistant. This is a critical dependency for Lighthouse functionality.
![Prowler App Architecture](docs/images/products/prowler-app-architecture.png)
<!-- Diagram source: docs/images/products/prowler-app-architecture.mmd — edit there, re-render at https://mermaid.live, and replace the PNG. -->
![Prowler App Architecture](docs/products/img/prowler-app-architecture.png)
## Prowler CLI
-113
@@ -2,119 +2,6 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.25.4] (Prowler v5.24.4)
### 🚀 Added
- `DJANGO_SENTRY_TRACES_SAMPLE_RATE` env var (default `0.02`) enables Sentry performance tracing for the API [(#10873)](https://github.com/prowler-cloud/prowler/pull/10873)
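A minimal sketch of how such an env-driven sample rate is typically passed to `sentry_sdk.init` (the DSN variable name and settings plumbing here are assumptions, not Prowler's actual code):

```python
import os

import sentry_sdk

# Hypothetical wiring: honor the documented env var, defaulting to tracing
# 2% of transactions when it is unset.
sentry_sdk.init(
    dsn=os.getenv("SENTRY_DSN", ""),  # DSN variable name is an assumption
    traces_sample_rate=float(os.getenv("DJANGO_SENTRY_TRACES_SAMPLE_RATE", "0.02")),
)
```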
### 🔄 Changed
- Attack Paths: Neo4j driver `connection_acquisition_timeout` is now configurable via `NEO4J_CONN_ACQUISITION_TIMEOUT` (default lowered from 120 s to 15 s) [(#10873)](https://github.com/prowler-cloud/prowler/pull/10873)
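The driver timeout is a standard `neo4j` Python driver option; a hedged sketch of the configurable knob (URI and credentials are placeholders):

```python
import os

from neo4j import GraphDatabase

# connection_acquisition_timeout (seconds) bounds how long a session waits
# for a pooled connection before failing; 15 is the new default.
driver = GraphDatabase.driver(
    "bolt://neo4j:7687",         # placeholder URI
    auth=("neo4j", "password"),  # placeholder credentials
    connection_acquisition_timeout=float(
        os.getenv("NEO4J_CONN_ACQUISITION_TIMEOUT", "15")
    ),
)
```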
---
## [1.25.3] (Prowler v5.24.3)
### 🚀 Added
- `/overviews/findings`, `/overviews/findings-severity` and `/overviews/services` now reflect newly-muted findings without waiting for the next scan. The post-mute `reaggregate-all-finding-group-summaries` task was extended to re-run the same per-scan pipeline that scan completion runs (`ScanSummary`, `DailySeveritySummary`, `FindingGroupDailySummary`) on the latest scan of every `(provider, day)` pair, keeping the pre-aggregated tables in sync with `Finding.muted` updates [(#10827)](https://github.com/prowler-cloud/prowler/pull/10827)
### 🐞 Fixed
- Finding groups aggregated `status` now treats muted findings as resolved: a group is `FAIL` only while at least one non-muted FAIL remains, otherwise it is `PASS` (including fully-muted groups). The `filter[status]` filter and the `sort=status` ordering share the same semantics, keeping `status` consistent with `fail_count` and the orthogonal `muted` flag [(#10825)](https://github.com/prowler-cloud/prowler/pull/10825)
- `aggregate_findings` is now idempotent: it deletes the scan's existing `ScanSummary` rows before `bulk_create` (see the sketch after this list), so re-runs (such as the post-mute reaggregation pipeline) no longer violate the `unique_scan_summary` constraint and no longer abort the downstream `DailySeveritySummary` / `FindingGroupDailySummary` recomputation for the affected scan [(#10827)](https://github.com/prowler-cloud/prowler/pull/10827)
- Attack Paths: Findings on AWS were silently dropped during the Neo4j merge for resources whose Cartography node is keyed by a short identifier (e.g. EC2 instances) rather than the full ARN [(#10839)](https://github.com/prowler-cloud/prowler/pull/10839)
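A hedged sketch of the delete-then-insert pattern behind the `aggregate_findings` fix (the row-building helper is hypothetical; `ScanSummary` is the API's per-scan aggregate model, import path omitted):

```python
from django.db import transaction

def aggregate_findings(scan):
    rows = build_scan_summary_rows(scan)  # hypothetical helper
    with transaction.atomic():
        # Remove whatever a previous (possibly partial) run wrote, so the
        # bulk_create below can never trip the unique_scan_summary constraint.
        ScanSummary.objects.filter(scan=scan).delete()
        ScanSummary.objects.bulk_create(rows)
```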
---
## [1.25.2] (Prowler v5.24.2)
### 🔄 Changed
- Finding groups `/resources` endpoints now materialize the filtered finding IDs into a Python list before filtering `ResourceFindingMapping`, so PostgreSQL switches from a Merge Semi Join that read hundreds of thousands of RFM index entries to a Nested Loop Index Scan over `finding_id`. The `has_mappings.exists()` pre-check is removed, and a request-scoped cache deduplicates the finding-id round-trip across the helpers that build different RFM querysets [(#10816)](https://github.com/prowler-cloud/prowler/pull/10816)
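In ORM terms the change is from a correlated subquery to an explicit `IN` list, roughly as follows (queryset and model names assumed from the bullet above):

```python
# Before: ResourceFindingMapping.objects.filter(finding__in=filtered_findings)
# produced a Merge Semi Join over the whole RFM index. Materializing the IDs
# hands PostgreSQL a literal IN list it can walk with a Nested Loop Index Scan.
finding_ids = list(filtered_findings.values_list("id", flat=True))
mappings = ResourceFindingMapping.objects.filter(finding_id__in=finding_ids)
```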
### 🐞 Fixed
- `/finding-groups/latest/<check_id>/resources` now selects the latest completed scan per provider by `-completed_at` (then `-inserted_at`) instead of `-inserted_at`, matching the `/finding-groups/latest` summary path and the daily-summary upsert so overlapping scans no longer produce diverging `delta`/`new_count` between the two endpoints [(#10802)](https://github.com/prowler-cloud/prowler/pull/10802)
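The corrected "latest scan per provider" selection, sketched (filter values and field names assumed to match the models):

```python
latest_scan = (
    Scan.objects.filter(provider=provider, state="completed")  # state value assumed
    .order_by("-completed_at", "-inserted_at")  # completion time first, then insertion
    .first()
)
```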
---
## [1.25.1] (Prowler v5.24.1)
### 🔄 Changed
- Attack Paths: Restore `SYNC_BATCH_SIZE` and `FINDINGS_BATCH_SIZE` defaults to 1000, upgrade Cartography to 0.135.0, enable Celery queue priority for cleanup task, rewrite Finding insertion, remove AWS graph cleanup and add timing logs [(#10729)](https://github.com/prowler-cloud/prowler/pull/10729)
### 🐞 Fixed
- Finding group resources endpoints now include findings without associated resources (orphaned IaC findings) as simulated resource rows, and return one row per finding when multiple findings share a resource [(#10708)](https://github.com/prowler-cloud/prowler/pull/10708)
- Attack Paths: Missing `tenant_id` filter while getting related findings after scan completes [(#10722)](https://github.com/prowler-cloud/prowler/pull/10722)
- Finding group counters `pass_count`, `fail_count` and `manual_count` now exclude muted findings [(#10753)](https://github.com/prowler-cloud/prowler/pull/10753)
- Silent data loss in `ResourceFindingMapping` bulk insert that left findings orphaned when `INSERT ... ON CONFLICT DO NOTHING` dropped rows without raising; added explicit `unique_fields` [(#10724)](https://github.com/prowler-cloud/prowler/pull/10724)
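A sketch of the shape of the `bulk_create` change (key and update columns are assumptions; only the call shape is the point):

```python
# Silent variant: ON CONFLICT DO NOTHING drops conflicting rows without error.
ResourceFindingMapping.objects.bulk_create(mappings, ignore_conflicts=True)

# Explicit variant: naming the conflict target via unique_fields makes the
# upsert deterministic instead of silently discarding rows.
ResourceFindingMapping.objects.bulk_create(
    mappings,
    update_conflicts=True,
    unique_fields=["resource", "finding"],  # assumed composite key
    update_fields=["inserted_at"],          # hypothetical non-key column
)
```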
---
## [1.25.0] (Prowler v5.24.0)
### 🔄 Changed
- Bump Poetry to `2.3.4` in Dockerfile and pre-commit hooks. Regenerate `api/poetry.lock` [(#10681)](https://github.com/prowler-cloud/prowler/pull/10681)
- Attack Paths: Remove dead `cleanup_findings` no-op and its supporting `prowler_finding_lastupdated` index [(#10684)](https://github.com/prowler-cloud/prowler/pull/10684)
### 🐞 Fixed
- Worker-beat race condition on cold start: replaced `sleep 15` with API service healthcheck dependency (Docker Compose) and init containers (Helm), aligned Gunicorn default port to `8080` [(#10603)](https://github.com/prowler-cloud/prowler/pull/10603)
- API container startup crash on Linux due to root-owned bind-mount preventing JWT key generation [(#10646)](https://github.com/prowler-cloud/prowler/pull/10646)
### 🔐 Security
- `pytest` from 8.2.2 to 9.0.3 to fix CVE-2025-71176 [(#10678)](https://github.com/prowler-cloud/prowler/pull/10678)
---
## [1.24.0] (Prowler v5.23.0)
### 🚀 Added
- RBAC role lookup filtered by `tenant_id` to prevent cross-tenant privilege leak [(#10491)](https://github.com/prowler-cloud/prowler/pull/10491)
- `VALKEY_SCHEME`, `VALKEY_USERNAME`, and `VALKEY_PASSWORD` environment variables to configure Celery broker TLS/auth connection details for Valkey/ElastiCache [(#10420)](https://github.com/prowler-cloud/prowler/pull/10420)
- `Vercel` provider support [(#10190)](https://github.com/prowler-cloud/prowler/pull/10190)
- Finding groups list and latest endpoints support `sort=delta`, ordering by `new_count` then `changed_count` so groups with the most new findings rank highest [(#10606)](https://github.com/prowler-cloud/prowler/pull/10606)
- Finding group resources endpoints (`/finding-groups/{check_id}/resources` and `/finding-groups/latest/{check_id}/resources`) now expose `finding_id` per row, pointing to the most recent matching Finding for each resource. UUIDv7 ordering guarantees `Max(finding__id)` resolves to the latest snapshot (annotation sketched after this list) [(#10630)](https://github.com/prowler-cloud/prowler/pull/10630)
- Handle CIS and CISA SCuBA compliance frameworks for Google Workspace [(#10629)](https://github.com/prowler-cloud/prowler/pull/10629)
- Sort support for all finding group counter fields: `pass_muted_count`, `fail_muted_count`, `manual_muted_count`, and all `new_*`/`changed_*` status-mute breakdown counters [(#10655)](https://github.com/prowler-cloud/prowler/pull/10655)
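  As a quick illustration of the UUIDv7 ordering guarantee mentioned above, using the `uuid6` package already pinned in the API's dependencies:

  ```python
  # UUIDv7 puts a millisecond timestamp in the most significant bits, so
  # later-generated IDs compare as larger and Max(id) selects the newest row.
  from uuid6 import uuid7

  first = uuid7()
  second = uuid7()
  assert first < second  # the uuid6 package documents monotonic generation
  ```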
### 🔄 Changed
- Finding groups list/latest/resources now expose `status` ∈ `{FAIL, PASS, MANUAL}` and `muted: bool` as orthogonal fields. The aggregated `status` reflects the underlying check outcome regardless of mute state, and `muted=true` signals that every finding in the group/resource is muted. New `manual_count` is exposed alongside `pass_count`/`fail_count`, plus `pass_muted_count`/`fail_muted_count`/`manual_muted_count` siblings so clients can isolate the muted half of each status. The `new_*`/`changed_*` deltas are now broken down by status and mute state via 12 new counters (`new_fail_count`, `new_fail_muted_count`, `new_pass_count`, `new_pass_muted_count`, `new_manual_count`, `new_manual_muted_count` and the matching `changed_*` set). New `filter[muted]=true|false` and `sort=status` (FAIL > PASS > MANUAL) / `sort=muted` are supported. `filter[status]=MUTED` is no longer accepted [(#10630)](https://github.com/prowler-cloud/prowler/pull/10630)
- Attack Paths: Periodic cleanup of stale scans with dead-worker detection via Celery inspect, marking orphaned `EXECUTING` scans as `FAILED` and recovering `graph_data_ready` [(#10387)](https://github.com/prowler-cloud/prowler/pull/10387)
- Attack Paths: Replace `_provider_id` property with `_Provider_{uuid}` label for provider isolation, add regex-based label injection for custom queries [(#10402)](https://github.com/prowler-cloud/prowler/pull/10402)
### 🐞 Fixed
- `reaggregate_all_finding_group_summaries_task` now refreshes finding group daily summaries for every `(provider, day)` combination instead of only the latest scan per provider, matching the unbounded scope of `mute_historical_findings_task`. Mute rule operations no longer leave older daily summaries drifting from the underlying muted findings [(#10630)](https://github.com/prowler-cloud/prowler/pull/10630)
- Finding groups list/latest now apply computed status/severity filters and finding-level prefilters (delta, region, service, category, resource group, scan, resource type), plus `check_title` support for sort/filter consistency [(#10428)](https://github.com/prowler-cloud/prowler/pull/10428)
- Populate compliance data inside `check_metadata` for findings, which was always returned as `null` [(#10449)](https://github.com/prowler-cloud/prowler/pull/10449)
- 403 error for admin users listing tenants due to roles query not using the admin database connection [(#10460)](https://github.com/prowler-cloud/prowler/pull/10460)
- Filter transient Neo4j defunct connection logs in Sentry `before_send` to suppress false-positive alerts handled by `RetryableSession` retries [(#10452)](https://github.com/prowler-cloud/prowler/pull/10452)
- `MANAGE_ACCOUNT` permission no longer required for listing and creating tenants [(#10468)](https://github.com/prowler-cloud/prowler/pull/10468)
- Finding groups muted filter, counters, metadata extraction and mute reaggregation [(#10477)](https://github.com/prowler-cloud/prowler/pull/10477)
- Finding groups `check_title__icontains` resolution, `name__icontains` resource filter and `resource_group` field in `/resources` response [(#10486)](https://github.com/prowler-cloud/prowler/pull/10486)
- Membership `post_delete` signal using raw FK ids to avoid `DoesNotExist` during cascade deletions [(#10497)](https://github.com/prowler-cloud/prowler/pull/10497)
- Finding group resources endpoints returning false 404 when filters match no results, and `sort` parameter being ignored [(#10510)](https://github.com/prowler-cloud/prowler/pull/10510)
- Jira integration failing with `JiraInvalidIssueTypeError` on non-English Jira instances due to hardcoded `"Task"` issue type; now dynamically fetches available issue types per project [(#10534)](https://github.com/prowler-cloud/prowler/pull/10534)
- Finding group `first_seen_at` now reflects when a new finding appeared in the scan instead of the oldest carry-forward date across all unchanged findings [(#10595)](https://github.com/prowler-cloud/prowler/pull/10595)
- Attack Paths: Remove `clear_cache` call from read-only query endpoints; cache clearing belongs to the scan/ingestion flow, not API queries [(#10586)](https://github.com/prowler-cloud/prowler/pull/10586)
### 🔐 Security
- Pin all unpinned dependencies to exact versions to prevent supply chain attacks and ensure reproducible builds [(#10469)](https://github.com/prowler-cloud/prowler/pull/10469)
- `authlib` bumped from 1.6.6 to 1.6.9 to fix CVE-2026-28802 (JWT `alg: none` validation bypass) [(#10579)](https://github.com/prowler-cloud/prowler/pull/10579)
- `aiohttp` bumped from 3.13.3 to 3.13.5 to fix CVE-2026-34520 (the C parser accepted null bytes and control characters in response headers) [(#10538)](https://github.com/prowler-cloud/prowler/pull/10538)
---
## [1.23.0] (Prowler v5.22.0)
### 🚀 Added
+2 -2
View File
@@ -1,4 +1,4 @@
FROM python:3.12.10-slim-bookworm@sha256:fd95fa221297a88e1cf49c55ec1828edd7c5a428187e67b5d1805692d11588db AS build
FROM python:3.12.10-slim-bookworm AS build
LABEL maintainer="https://github.com/prowler-cloud/api"
@@ -71,7 +71,7 @@ RUN mkdir -p /tmp/prowler_api_output
COPY pyproject.toml ./
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry==2.3.4
pip install --no-cache-dir poetry
ENV PATH="/home/prowler/.local/bin:$PATH"
+2 -20
View File
@@ -30,32 +30,14 @@ start_prod_server() {
poetry run gunicorn -c config/guniconf.py config.wsgi:application
}
resolve_worker_hostname() {
TASK_ID=""
if [ -n "$ECS_CONTAINER_METADATA_URI_V4" ]; then
TASK_ID=$(wget -qO- --timeout=2 "${ECS_CONTAINER_METADATA_URI_V4}/task" | \
python3 -c "import sys,json; print(json.load(sys.stdin)['TaskARN'].split('/')[-1])" 2>/dev/null)
fi
if [ -z "$TASK_ID" ]; then
TASK_ID=$(python3 -c "import uuid; print(uuid.uuid4().hex)")
fi
echo "${TASK_ID}@$(hostname)"
}
start_worker() {
echo "Starting the worker..."
poetry run python -m celery -A config.celery worker \
-n "$(resolve_worker_hostname)" \
-l "${DJANGO_LOGGING_LEVEL:-info}" \
-Q celery,scans,scan-reports,deletion,backfill,overview,integrations,compliance,attack-paths-scans \
-E --max-tasks-per-child 1
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations,compliance,attack-paths-scans -E --max-tasks-per-child 1
}
start_worker_beat() {
echo "Starting the worker-beat..."
sleep 15
poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
}
+719 -411
View File
File diff suppressed because it is too large
+23 -23
View File
@@ -5,44 +5,44 @@ requires = ["poetry-core"]
[project]
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
"celery (==5.6.2)",
"celery (>=5.4.0,<6.0.0)",
"dj-rest-auth[with_social,jwt] (==7.0.1)",
"django (==5.1.15)",
"django-allauth[saml] (==65.15.0)",
"django-celery-beat (==2.9.0)",
"django-celery-results (==2.6.0)",
"django-allauth[saml] (>=65.13.0,<66.0.0)",
"django-celery-beat (>=2.7.0,<3.0.0)",
"django-celery-results (>=2.5.1,<3.0.0)",
"django-cors-headers==4.4.0",
"django-environ==0.11.2",
"django-filter==24.3",
"django-guid==3.5.0",
"django-postgres-extra (==2.0.9)",
"django-postgres-extra (>=2.0.8,<3.0.0)",
"djangorestframework==3.15.2",
"djangorestframework-jsonapi==7.0.2",
"djangorestframework-simplejwt (==5.5.1)",
"drf-nested-routers (==0.95.0)",
"djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
"drf-nested-routers (>=0.94.1,<1.0.0)",
"drf-spectacular==0.27.2",
"drf-spectacular-jsonapi==0.5.1",
"defusedxml==0.7.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.24",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.22",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (==1.3.0)",
"sentry-sdk[django] (==2.56.0)",
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
"uuid6==2024.7.10",
"openai (==1.109.1)",
"openai (>=1.82.0,<2.0.0)",
"xmlsec==1.3.14",
"h2 (==4.3.0)",
"markdown (==3.10.2)",
"markdown (>=3.9,<4.0)",
"drf-simple-apikey (==2.2.1)",
"matplotlib (==3.10.8)",
"reportlab (==4.4.10)",
"neo4j (==6.1.0)",
"cartography (==0.135.0)",
"gevent (==25.9.1)",
"werkzeug (==3.1.7)",
"sqlparse (==0.5.5)",
"fonttools (==4.62.1)"
"matplotlib (>=3.10.6,<4.0.0)",
"reportlab (>=4.4.4,<5.0.0)",
"neo4j (>=6.0.0,<7.0.0)",
"cartography (==0.132.0)",
"gevent (>=25.9.1,<26.0.0)",
"werkzeug (>=3.1.4)",
"sqlparse (>=0.5.4)",
"fonttools (>=4.60.2)"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
@@ -50,7 +50,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.25.5"
version = "1.23.1"
[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -62,9 +62,10 @@ django-silk = "5.3.2"
docker = "7.1.0"
filelock = "3.20.3"
freezegun = "1.5.1"
marshmallow = ">=3.15.0,<4.0.0"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "9.0.3"
pytest = "8.2.2"
pytest-cov = "5.0.0"
pytest-django = "4.8.0"
pytest-env = "1.1.3"
@@ -74,4 +75,3 @@ ruff = "0.5.0"
safety = "3.7.0"
tqdm = "4.67.1"
vulture = "2.14"
prek = "0.3.9"
@@ -1,170 +0,0 @@
"""
Cypher sanitizer for custom (user-supplied) Attack Paths queries.
Two responsibilities:
1. **Validation** - reject queries containing SSRF or dangerous procedure
patterns (defense-in-depth; the primary control is ``neo4j.READ_ACCESS``).
2. **Provider-scoped label injection** - inject a dynamic
``_Provider_{uuid}`` label into every node pattern so the database can
use its native label index for provider isolation.
Label-injection pipeline:
1. **Protect** string literals and line comments (placeholder replacement).
2. **Split** by top-level clause keywords to track clause context.
3. **Pass A** - inject into *labeled* node patterns in ALL segments.
4. **Pass B** - inject into *bare* node patterns in MATCH segments only.
5. **Restore** protected regions.
"""
import re
from rest_framework.exceptions import ValidationError
from tasks.jobs.attack_paths.config import get_provider_label
# Step 1 - String / comment protection
# Single combined regex: strings first, then line comments.
# The regex engine finds the leftmost match, so a string like 'https://prowler.com'
# is consumed as a string before the // inside it can match as a comment.
_PROTECTED_RE = re.compile(r"'(?:[^'\\]|\\.)*'|\"(?:[^\"\\]|\\.)*\"|//[^\n]*")
# Step 2 - Clause splitting
# OPTIONAL MATCH must come before MATCH to avoid partial matching.
_CLAUSE_RE = re.compile(
r"\b(OPTIONAL\s+MATCH|MATCH|WHERE|RETURN|WITH|ORDER\s+BY"
r"|SKIP|LIMIT|UNION|UNWIND|CALL)\b",
re.IGNORECASE,
)
# Pass A - Labeled node patterns (all segments)
# Matches node patterns that have at least one :Label.
# (?<!\w)\( - open paren NOT preceded by a word char (excludes function calls).
# Group 1: optional variable + one or more :Label
# Group 2: optional {properties} + closing paren
_LABELED_NODE_RE = re.compile(
r"(?<!\w)\("
r"("
r"\s*(?:[a-zA-Z_]\w*)?"
r"(?:\s*:\s*(?:`[^`]*`|[a-zA-Z_]\w*))+"
r")"
r"("
r"\s*(?:\{[^}]*\})?"
r"\s*\)"
r")"
)
# Pass B - Bare node patterns (MATCH segments only)
# Matches (identifier) or (identifier {properties}) without any :Label.
# Only applied in MATCH/OPTIONAL MATCH segments.
_BARE_NODE_RE = re.compile(
r"(?<!\w)\(" r"(\s*[a-zA-Z_]\w*)" r"(\s*(?:\{[^}]*\})?)" r"\s*\)"
)
_MATCH_CLAUSES = frozenset({"MATCH", "OPTIONAL MATCH"})
def _inject_labeled(segment: str, label: str) -> str:
"""Inject provider label into all node patterns that have existing labels."""
return _LABELED_NODE_RE.sub(rf"(\1:{label}\2", segment)
def _inject_bare(segment: str, label: str) -> str:
"""Inject provider label into bare `(identifier)` node patterns."""
def _replace(match):
var = match.group(1)
props = match.group(2).strip()
if props:
return f"({var}:{label} {props})"
return f"({var}:{label})"
return _BARE_NODE_RE.sub(_replace, segment)
def inject_provider_label(cypher: str, provider_id: str) -> str:
"""Rewrite a Cypher query to scope every node pattern to a provider.
Args:
cypher: The original Cypher query string.
provider_id: The provider UUID (will be converted to a label via
`get_provider_label`).
Returns:
The rewritten Cypher with `:_Provider_{uuid}` appended to every
node pattern.
"""
label = get_provider_label(provider_id)
# Step 1: Protect strings and comments (single pass, leftmost-first)
protected: list[str] = []
def _save(match):
protected.append(match.group(0))
return f"\x00P{len(protected) - 1}\x00"
work = _PROTECTED_RE.sub(_save, cypher)
# Step 2: Split by clause keywords
parts = _CLAUSE_RE.split(work)
# Steps 3-4: Apply injection passes per segment
result: list[str] = []
current_clause: str | None = None
for i, part in enumerate(parts):
if i % 2 == 1:
# Keyword token - normalize for clause tracking
current_clause = re.sub(r"\s+", " ", part.strip()).upper()
result.append(part)
else:
# Content segment - apply injection based on clause context
part = _inject_labeled(part, label)
if current_clause in _MATCH_CLAUSES:
part = _inject_bare(part, label)
result.append(part)
work = "".join(result)
# Step 5: Restore protected regions
for i, original in enumerate(protected):
work = work.replace(f"\x00P{i}\x00", original)
return work
# ---------------------------------------------------------------------------
# Validation
# ---------------------------------------------------------------------------
# Patterns that indicate SSRF or dangerous procedure calls
# Defense-in-depth layer - the primary control is `neo4j.READ_ACCESS`
_BLOCKED_PATTERNS = [
re.compile(r"\bLOAD\s+CSV\b", re.IGNORECASE),
re.compile(r"\bapoc\.load\b", re.IGNORECASE),
re.compile(r"\bapoc\.import\b", re.IGNORECASE),
re.compile(r"\bapoc\.export\b", re.IGNORECASE),
re.compile(r"\bapoc\.cypher\b", re.IGNORECASE),
re.compile(r"\bapoc\.systemdb\b", re.IGNORECASE),
re.compile(r"\bapoc\.config\b", re.IGNORECASE),
re.compile(r"\bapoc\.periodic\b", re.IGNORECASE),
re.compile(r"\bapoc\.do\b", re.IGNORECASE),
re.compile(r"\bapoc\.trigger\b", re.IGNORECASE),
re.compile(r"\bapoc\.custom\b", re.IGNORECASE),
]
def validate_custom_query(cypher: str) -> None:
"""Reject queries containing known SSRF or dangerous procedure patterns.
Raises ValidationError if a blocked pattern is found.
String literals and comments are stripped before matching to avoid
false positives.
"""
stripped = _PROTECTED_RE.sub("", cypher)
for pattern in _BLOCKED_PATTERNS:
if pattern.search(stripped):
raise ValidationError({"query": "Query contains a blocked operation"})
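For context on what this removed module did, a minimal before/after of the label injection, using a shortened hypothetical provider label:

```python
# Illustrative only; assume get_provider_label(provider_id) -> "_Provider_1234".
query = "MATCH (ec2:EC2Instance)--(sg) WHERE ec2.exposed_internet = true RETURN ec2, sg"
# inject_provider_label(query, provider_id) rewrote every node pattern:
#   MATCH (ec2:EC2Instance:_Provider_1234)--(sg:_Provider_1234)
#   WHERE ec2.exposed_internet = true RETURN ec2, sg
# Pass A extended the labeled pattern (ec2:EC2Instance); Pass B labeled the
# bare (sg) pattern because it sits inside a MATCH clause.
```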
+14 -9
View File
@@ -11,8 +11,8 @@ from config.env import env
from django.conf import settings
from tasks.jobs.attack_paths.config import (
BATCH_SIZE,
PROVIDER_ID_PROPERTY,
PROVIDER_RESOURCE_LABEL,
get_provider_label,
)
from api.attack_paths.retryable_session import RetryableSession
@@ -28,7 +28,6 @@ READ_QUERY_TIMEOUT_SECONDS = env.int(
"ATTACK_PATHS_READ_QUERY_TIMEOUT_SECONDS", default=30
)
MAX_CUSTOM_QUERY_NODES = env.int("ATTACK_PATHS_MAX_CUSTOM_QUERY_NODES", default=250)
CONN_ACQUISITION_TIMEOUT = env.int("NEO4J_CONN_ACQUISITION_TIMEOUT", default=15)
READ_EXCEPTION_CODES = [
"Neo.ClientError.Statement.AccessMode",
"Neo.ClientError.Procedure.ProcedureNotFound",
@@ -63,7 +62,7 @@ def init_driver() -> neo4j.Driver:
auth=(config["USER"], config["PASSWORD"]),
keep_alive=True,
max_connection_lifetime=7200,
connection_acquisition_timeout=CONN_ACQUISITION_TIMEOUT,
connection_acquisition_timeout=120,
max_connection_pool_size=50,
)
_driver.verify_connectivity()
@@ -164,8 +163,11 @@ def drop_subgraph(database: str, provider_id: str) -> int:
Uses batched deletion to avoid memory issues with large graphs.
Silently returns 0 if the database doesn't exist.
"""
provider_label = get_provider_label(provider_id)
deleted_nodes = 0
parameters = {
"provider_id": provider_id,
"batch_size": BATCH_SIZE,
}
try:
with get_session(database) as session:
@@ -173,12 +175,12 @@ def drop_subgraph(database: str, provider_id: str) -> int:
while deleted_count > 0:
result = session.run(
f"""
MATCH (n:{PROVIDER_RESOURCE_LABEL}:`{provider_label}`)
MATCH (n:{PROVIDER_RESOURCE_LABEL} {{{PROVIDER_ID_PROPERTY}: $provider_id}})
WITH n LIMIT $batch_size
DETACH DELETE n
RETURN COUNT(n) AS deleted_nodes_count
""",
{"batch_size": BATCH_SIZE},
parameters,
)
deleted_count = result.single().get("deleted_nodes_count", 0)
deleted_nodes += deleted_count
@@ -197,12 +199,15 @@ def has_provider_data(database: str, provider_id: str) -> bool:
Returns `False` if the database doesn't exist.
"""
provider_label = get_provider_label(provider_id)
query = f"MATCH (n:{PROVIDER_RESOURCE_LABEL}:`{provider_label}`) RETURN 1 LIMIT 1"
query = (
f"MATCH (n:{PROVIDER_RESOURCE_LABEL} "
f"{{{PROVIDER_ID_PROPERTY}: $provider_id}}) "
"RETURN 1 LIMIT 1"
)
try:
with get_session(database, default_access_mode=neo4j.READ_ACCESS) as session:
result = session.run(query)
result = session.run(query, {"provider_id": provider_id})
return result.single() is not None
except GraphDatabaseQueryException as exc:
+97 -85
View File
@@ -3,7 +3,7 @@ from api.attack_paths.queries.types import (
AttackPathsQueryDefinition,
AttackPathsQueryParameterDefinition,
)
from tasks.jobs.attack_paths.config import PROWLER_FINDING_LABEL
from tasks.jobs.attack_paths.config import PROVIDER_ID_PROPERTY, PROWLER_FINDING_LABEL
# Custom Attack Path Queries
@@ -16,6 +16,8 @@ AWS_INTERNET_EXPOSED_EC2_SENSITIVE_S3_ACCESS = AttackPathsQueryDefinition(
description="Detect EC2 instances with SSH exposed to the internet that can assume higher-privileged roles to read tagged sensitive S3 buckets despite bucket-level public access blocks.",
provider="aws",
cypher=f"""
OPTIONAL MATCH (internet:Internet {{{PROVIDER_ID_PROPERTY}: $provider_id}})
MATCH path_s3 = (aws:AWSAccount {{id: $provider_uid}})--(s3:S3Bucket)--(t:AWSTag)
WHERE toLower(t.key) = toLower($tag_key) AND toLower(t.value) = toLower($tag_value)
@@ -29,7 +31,7 @@ AWS_INTERNET_EXPOSED_EC2_SENSITIVE_S3_ACCESS = AttackPathsQueryDefinition(
MATCH path_assume_role = (ec2)-[p:STS_ASSUMEROLE_ALLOW*1..9]-(r:AWSRole)
OPTIONAL MATCH (internet:Internet)-[can_access:CAN_ACCESS]->(ec2)
OPTIONAL MATCH (internet)-[can_access:CAN_ACCESS]->(ec2)
WITH collect(path_s3) + collect(path_ec2) + collect(path_role) + collect(path_assume_role) AS paths,
head(collect(internet)) AS internet, collect(can_access) AS can_access
@@ -38,7 +40,7 @@ AWS_INTERNET_EXPOSED_EC2_SENSITIVE_S3_ACCESS = AttackPathsQueryDefinition(
WITH paths, internet, can_access, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access
""",
@@ -77,7 +79,7 @@ AWS_RDS_INSTANCES = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -100,7 +102,7 @@ AWS_RDS_UNENCRYPTED_STORAGE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -123,7 +125,7 @@ AWS_S3_ANONYMOUS_ACCESS_BUCKETS = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -147,7 +149,7 @@ AWS_IAM_STATEMENTS_ALLOW_ALL_ACTIONS = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -171,7 +173,7 @@ AWS_IAM_STATEMENTS_ALLOW_DELETE_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -195,7 +197,7 @@ AWS_IAM_STATEMENTS_ALLOW_CREATE_ACTIONS = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -213,10 +215,12 @@ AWS_EC2_INSTANCES_INTERNET_EXPOSED = AttackPathsQueryDefinition(
description="Find EC2 instances flagged as exposed to the internet within the selected account.",
provider="aws",
cypher=f"""
OPTIONAL MATCH (internet:Internet {{{PROVIDER_ID_PROPERTY}: $provider_id}})
MATCH path = (aws:AWSAccount {{id: $provider_uid}})--(ec2:EC2Instance)
WHERE ec2.exposed_internet = true
OPTIONAL MATCH (internet:Internet)-[can_access:CAN_ACCESS]->(ec2)
OPTIONAL MATCH (internet)-[can_access:CAN_ACCESS]->(ec2)
WITH collect(path) AS paths, head(collect(internet)) AS internet, collect(can_access) AS can_access
UNWIND paths AS p
@@ -224,7 +228,7 @@ AWS_EC2_INSTANCES_INTERNET_EXPOSED = AttackPathsQueryDefinition(
WITH paths, internet, can_access, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access
""",
@@ -238,11 +242,13 @@ AWS_SECURITY_GROUPS_OPEN_INTERNET_FACING = AttackPathsQueryDefinition(
description="Find internet-facing resources associated with security groups that allow inbound access from '0.0.0.0/0'.",
provider="aws",
cypher=f"""
OPTIONAL MATCH (internet:Internet {{{PROVIDER_ID_PROPERTY}: $provider_id}})
MATCH path = (aws:AWSAccount {{id: $provider_uid}})--(ec2:EC2Instance)--(sg:EC2SecurityGroup)--(ipi:IpPermissionInbound)--(ir:IpRange)
WHERE ec2.exposed_internet = true
AND ir.range = "0.0.0.0/0"
OPTIONAL MATCH (internet:Internet)-[can_access:CAN_ACCESS]->(ec2)
OPTIONAL MATCH (internet)-[can_access:CAN_ACCESS]->(ec2)
WITH collect(path) AS paths, head(collect(internet)) AS internet, collect(can_access) AS can_access
UNWIND paths AS p
@@ -250,7 +256,7 @@ AWS_SECURITY_GROUPS_OPEN_INTERNET_FACING = AttackPathsQueryDefinition(
WITH paths, internet, can_access, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access
""",
@@ -264,10 +270,12 @@ AWS_CLASSIC_ELB_INTERNET_EXPOSED = AttackPathsQueryDefinition(
description="Find Classic Load Balancers exposed to the internet along with their listeners.",
provider="aws",
cypher=f"""
OPTIONAL MATCH (internet:Internet {{{PROVIDER_ID_PROPERTY}: $provider_id}})
MATCH path = (aws:AWSAccount {{id: $provider_uid}})--(elb:LoadBalancer)--(listener:ELBListener)
WHERE elb.exposed_internet = true
OPTIONAL MATCH (internet:Internet)-[can_access:CAN_ACCESS]->(elb)
OPTIONAL MATCH (internet)-[can_access:CAN_ACCESS]->(elb)
WITH collect(path) AS paths, head(collect(internet)) AS internet, collect(can_access) AS can_access
UNWIND paths AS p
@@ -275,7 +283,7 @@ AWS_CLASSIC_ELB_INTERNET_EXPOSED = AttackPathsQueryDefinition(
WITH paths, internet, can_access, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access
""",
@@ -289,10 +297,12 @@ AWS_ELBV2_INTERNET_EXPOSED = AttackPathsQueryDefinition(
description="Find ELBv2 load balancers exposed to the internet along with their listeners.",
provider="aws",
cypher=f"""
OPTIONAL MATCH (internet:Internet {{{PROVIDER_ID_PROPERTY}: $provider_id}})
MATCH path = (aws:AWSAccount {{id: $provider_uid}})--(elbv2:LoadBalancerV2)--(listener:ELBV2Listener)
WHERE elbv2.exposed_internet = true
OPTIONAL MATCH (internet:Internet)-[can_access:CAN_ACCESS]->(elbv2)
OPTIONAL MATCH (internet)-[can_access:CAN_ACCESS]->(elbv2)
WITH collect(path) AS paths, head(collect(internet)) AS internet, collect(can_access) AS can_access
UNWIND paths AS p
@@ -300,7 +310,7 @@ AWS_ELBV2_INTERNET_EXPOSED = AttackPathsQueryDefinition(
WITH paths, internet, can_access, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access
""",
@@ -314,13 +324,15 @@ AWS_PUBLIC_IP_RESOURCE_LOOKUP = AttackPathsQueryDefinition(
description="Given a public IP address, find the related AWS resource and its adjacent node within the selected account.",
provider="aws",
cypher=f"""
OPTIONAL MATCH (internet:Internet {{{PROVIDER_ID_PROPERTY}: $provider_id}})
MATCH path = (aws:AWSAccount {{id: $provider_uid}})-[r]-(x)-[q]-(y)
WHERE (x:EC2PrivateIp AND x.public_ip = $ip)
OR (x:EC2Instance AND x.publicipaddress = $ip)
OR (x:NetworkInterface AND x.public_ip = $ip)
OR (x:ElasticIPAddress AND x.public_ip = $ip)
OPTIONAL MATCH (internet:Internet)-[can_access:CAN_ACCESS]->(x)
OPTIONAL MATCH (internet)-[can_access:CAN_ACCESS]->(x)
WITH collect(path) AS paths, head(collect(internet)) AS internet, collect(can_access) AS can_access
UNWIND paths AS p
@@ -328,7 +340,7 @@ AWS_PUBLIC_IP_RESOURCE_LOOKUP = AttackPathsQueryDefinition(
WITH paths, internet, can_access, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access
""",
@@ -391,7 +403,7 @@ AWS_APPRUNNER_PRIVESC_PASSROLE_CREATE_SERVICE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -429,7 +441,7 @@ AWS_APPRUNNER_PRIVESC_UPDATE_SERVICE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -499,7 +511,7 @@ AWS_BEDROCK_PRIVESC_PASSROLE_CODE_INTERPRETER = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -546,7 +558,7 @@ AWS_BEDROCK_PRIVESC_INVOKE_CODE_INTERPRETER = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -598,7 +610,7 @@ AWS_CLOUDFORMATION_PRIVESC_PASSROLE_CREATE_STACK = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -636,7 +648,7 @@ AWS_CLOUDFORMATION_PRIVESC_UPDATE_STACK = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -697,7 +709,7 @@ AWS_CLOUDFORMATION_PRIVESC_PASSROLE_CREATE_STACKSET = AttackPathsQueryDefinition
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -749,7 +761,7 @@ AWS_CLOUDFORMATION_PRIVESC_PASSROLE_UPDATE_STACKSET = AttackPathsQueryDefinition
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -796,7 +808,7 @@ AWS_CLOUDFORMATION_PRIVESC_CHANGESET = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -857,7 +869,7 @@ AWS_CODEBUILD_PRIVESC_PASSROLE_CREATE_PROJECT = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -895,7 +907,7 @@ AWS_CODEBUILD_PRIVESC_START_BUILD = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -933,7 +945,7 @@ AWS_CODEBUILD_PRIVESC_START_BUILD_BATCH = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -994,7 +1006,7 @@ AWS_CODEBUILD_PRIVESC_PASSROLE_CREATE_PROJECT_BATCH = AttackPathsQueryDefinition
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1065,7 +1077,7 @@ AWS_DATAPIPELINE_PRIVESC_PASSROLE_CREATE_PIPELINE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1117,7 +1129,7 @@ AWS_EC2_PRIVESC_PASSROLE_IAM = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1173,7 +1185,7 @@ AWS_EC2_PRIVESC_MODIFY_INSTANCE_ATTRIBUTE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1225,7 +1237,7 @@ AWS_EC2_PRIVESC_PASSROLE_SPOT_INSTANCES = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1272,7 +1284,7 @@ AWS_EC2_PRIVESC_LAUNCH_TEMPLATE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1310,7 +1322,7 @@ AWS_EC2INSTANCECONNECT_PRIVESC_SEND_SSH_PUBLIC_KEY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1380,7 +1392,7 @@ AWS_ECS_PRIVESC_PASSROLE_CREATE_SERVICE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1450,7 +1462,7 @@ AWS_ECS_PRIVESC_PASSROLE_RUN_TASK = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1511,7 +1523,7 @@ AWS_ECS_PRIVESC_PASSROLE_CREATE_SERVICE_EXISTING_CLUSTER = AttackPathsQueryDefin
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1572,7 +1584,7 @@ AWS_ECS_PRIVESC_PASSROLE_RUN_TASK_EXISTING_CLUSTER = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1633,7 +1645,7 @@ AWS_ECS_PRIVESC_PASSROLE_START_TASK_EXISTING_CLUSTER = AttackPathsQueryDefinitio
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1680,7 +1692,7 @@ AWS_ECS_PRIVESC_EXECUTE_COMMAND = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1732,7 +1744,7 @@ AWS_GLUE_PRIVESC_PASSROLE_DEV_ENDPOINT = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1770,7 +1782,7 @@ AWS_GLUE_PRIVESC_UPDATE_DEV_ENDPOINT = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1831,7 +1843,7 @@ AWS_GLUE_PRIVESC_PASSROLE_CREATE_JOB = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1892,7 +1904,7 @@ AWS_GLUE_PRIVESC_PASSROLE_CREATE_JOB_TRIGGER = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1953,7 +1965,7 @@ AWS_GLUE_PRIVESC_PASSROLE_UPDATE_JOB = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2014,7 +2026,7 @@ AWS_GLUE_PRIVESC_PASSROLE_UPDATE_JOB_TRIGGER = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2057,7 +2069,7 @@ AWS_IAM_PRIVESC_CREATE_POLICY_VERSION = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2100,7 +2112,7 @@ AWS_IAM_PRIVESC_CREATE_ACCESS_KEY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2157,7 +2169,7 @@ AWS_IAM_PRIVESC_DELETE_CREATE_ACCESS_KEY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2200,7 +2212,7 @@ AWS_IAM_PRIVESC_CREATE_LOGIN_PROFILE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2240,7 +2252,7 @@ AWS_IAM_PRIVESC_PUT_ROLE_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2283,7 +2295,7 @@ AWS_IAM_PRIVESC_UPDATE_LOGIN_PROFILE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2323,7 +2335,7 @@ AWS_IAM_PRIVESC_PUT_USER_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2363,7 +2375,7 @@ AWS_IAM_PRIVESC_ATTACH_USER_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2403,7 +2415,7 @@ AWS_IAM_PRIVESC_ATTACH_ROLE_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2446,7 +2458,7 @@ AWS_IAM_PRIVESC_ATTACH_GROUP_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2489,7 +2501,7 @@ AWS_IAM_PRIVESC_PUT_GROUP_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2532,7 +2544,7 @@ AWS_IAM_PRIVESC_UPDATE_ASSUME_ROLE_POLICY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2575,7 +2587,7 @@ AWS_IAM_PRIVESC_ADD_USER_TO_GROUP = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2618,7 +2630,7 @@ AWS_IAM_PRIVESC_ATTACH_ROLE_POLICY_ASSUME_ROLE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2675,7 +2687,7 @@ AWS_IAM_PRIVESC_ATTACH_USER_POLICY_CREATE_ACCESS_KEY = AttackPathsQueryDefinitio
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2719,7 +2731,7 @@ AWS_IAM_PRIVESC_CREATE_POLICY_VERSION_ASSUME_ROLE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2762,7 +2774,7 @@ AWS_IAM_PRIVESC_PUT_ROLE_POLICY_ASSUME_ROLE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2819,7 +2831,7 @@ AWS_IAM_PRIVESC_PUT_USER_POLICY_CREATE_ACCESS_KEY = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2876,7 +2888,7 @@ AWS_IAM_PRIVESC_ATTACH_ROLE_POLICY_UPDATE_ASSUME_ROLE = AttackPathsQueryDefiniti
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2934,7 +2946,7 @@ AWS_IAM_PRIVESC_CREATE_POLICY_VERSION_UPDATE_ASSUME_ROLE = AttackPathsQueryDefin
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -2991,7 +3003,7 @@ AWS_IAM_PRIVESC_PUT_ROLE_POLICY_UPDATE_ASSUME_ROLE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3052,7 +3064,7 @@ AWS_LAMBDA_PRIVESC_PASSROLE_CREATE_FUNCTION = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3113,7 +3125,7 @@ AWS_LAMBDA_PRIVESC_PASSROLE_CREATE_FUNCTION_EVENT_SOURCE = AttackPathsQueryDefin
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3156,7 +3168,7 @@ AWS_LAMBDA_PRIVESC_UPDATE_FUNCTION_CODE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3213,7 +3225,7 @@ AWS_LAMBDA_PRIVESC_UPDATE_FUNCTION_CODE_INVOKE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3270,7 +3282,7 @@ AWS_LAMBDA_PRIVESC_UPDATE_FUNCTION_CODE_ADD_PERMISSION = AttackPathsQueryDefinit
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3331,7 +3343,7 @@ AWS_LAMBDA_PRIVESC_PASSROLE_CREATE_FUNCTION_ADD_PERMISSION = AttackPathsQueryDef
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3383,7 +3395,7 @@ AWS_SAGEMAKER_PRIVESC_PASSROLE_CREATE_NOTEBOOK = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3435,7 +3447,7 @@ AWS_SAGEMAKER_PRIVESC_PASSROLE_CREATE_TRAINING_JOB = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3487,7 +3499,7 @@ AWS_SAGEMAKER_PRIVESC_PASSROLE_CREATE_PROCESSING_JOB = AttackPathsQueryDefinitio
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3530,7 +3542,7 @@ AWS_SAGEMAKER_PRIVESC_PRESIGNED_NOTEBOOK_URL = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3600,7 +3612,7 @@ AWS_SAGEMAKER_PRIVESC_LIFECYCLE_CONFIG_NOTEBOOK = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3638,7 +3650,7 @@ AWS_SSM_PRIVESC_START_SESSION = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3676,7 +3688,7 @@ AWS_SSM_PRIVESC_SEND_COMMAND = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -3719,7 +3731,7 @@ AWS_STS_PRIVESC_ASSUME_ROLE = AttackPathsQueryDefinition(
WITH paths, collect(DISTINCT n) AS unique_nodes
UNWIND unique_nodes AS n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL'}})
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})
RETURN paths, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
""",
@@ -1,18 +1,13 @@
from tasks.jobs.attack_paths.config import PROVIDER_RESOURCE_LABEL, get_provider_label
def get_cartography_schema_query(provider_id: str) -> str:
"""Build the Cartography schema metadata query scoped to a provider label."""
provider_label = get_provider_label(provider_id)
return f"""
MATCH (n:{PROVIDER_RESOURCE_LABEL}:`{provider_label}`)
WHERE n._module_name STARTS WITH 'cartography:'
AND NOT n._module_name IN ['cartography:ontology', 'cartography:prowler']
AND n._module_version IS NOT NULL
RETURN n._module_name AS module_name, n._module_version AS module_version
LIMIT 1
"""
from tasks.jobs.attack_paths.config import PROVIDER_ID_PROPERTY, PROVIDER_RESOURCE_LABEL
CARTOGRAPHY_SCHEMA_METADATA = f"""
MATCH (n:{PROVIDER_RESOURCE_LABEL} {{{PROVIDER_ID_PROPERTY}: $provider_id}})
WHERE n._module_name STARTS WITH 'cartography:'
AND NOT n._module_name IN ['cartography:ontology', 'cartography:prowler']
AND n._module_version IS NOT NULL
RETURN n._module_name AS module_name, n._module_version AS module_version
LIMIT 1
"""
GITHUB_SCHEMA_URL = (
"https://github.com/cartography-cncf/cartography/blob/"
@@ -1,26 +1,22 @@
import logging
import re
from typing import Any, Iterable
import neo4j
from rest_framework.exceptions import APIException, PermissionDenied, ValidationError
from api.attack_paths import database as graph_database, AttackPathsQueryDefinition
from api.attack_paths.cypher_sanitizer import (
inject_provider_label,
validate_custom_query,
)
from api.attack_paths.queries.schema import (
CARTOGRAPHY_SCHEMA_METADATA,
GITHUB_SCHEMA_URL,
RAW_SCHEMA_URL,
get_cartography_schema_query,
)
from config.custom_logging import BackendLogger
from tasks.jobs.attack_paths.config import (
INTERNAL_LABELS,
INTERNAL_PROPERTIES,
get_provider_label,
PROVIDER_ID_PROPERTY,
is_dynamic_isolation_label,
)
@@ -76,6 +72,7 @@ def prepare_parameters(
clean_parameters = {
"provider_uid": str(provider_uid),
"provider_id": str(provider_id),
}
for definition_parameter in definition.parameters:
@@ -126,6 +123,38 @@ def execute_query(
# Custom query helpers
# Patterns that indicate SSRF or dangerous procedure calls
# Defense-in-depth layer - the primary control is `neo4j.READ_ACCESS`
_BLOCKED_PATTERNS = [
re.compile(r"\bLOAD\s+CSV\b", re.IGNORECASE),
re.compile(r"\bapoc\.load\b", re.IGNORECASE),
re.compile(r"\bapoc\.import\b", re.IGNORECASE),
re.compile(r"\bapoc\.export\b", re.IGNORECASE),
re.compile(r"\bapoc\.cypher\b", re.IGNORECASE),
re.compile(r"\bapoc\.systemdb\b", re.IGNORECASE),
re.compile(r"\bapoc\.config\b", re.IGNORECASE),
re.compile(r"\bapoc\.periodic\b", re.IGNORECASE),
re.compile(r"\bapoc\.do\b", re.IGNORECASE),
re.compile(r"\bapoc\.trigger\b", re.IGNORECASE),
re.compile(r"\bapoc\.custom\b", re.IGNORECASE),
]
# Strip string literals so patterns inside quotes don't cause false positives
# Handles escaped quotes (\' and \") inside strings
_STRING_LITERALS = re.compile(r"'(?:[^'\\]|\\.)*'|\"(?:[^\"\\]|\\.)*\"")
def validate_custom_query(cypher: str) -> None:
"""Reject queries containing known SSRF or dangerous procedure patterns.
Raises ValidationError if a blocked pattern is found.
String literals are stripped before matching to avoid false positives.
"""
stripped = _STRING_LITERALS.sub("", cypher)
for pattern in _BLOCKED_PATTERNS:
if pattern.search(stripped):
raise ValidationError({"query": "Query contains a blocked operation"})
def normalize_custom_query_payload(raw_data):
if not isinstance(raw_data, dict):
@@ -144,15 +173,7 @@ def execute_custom_query(
cypher: str,
provider_id: str,
) -> dict[str, Any]:
# Defense-in-depth for custom queries:
# 1. neo4j.READ_ACCESS — prevents mutations at the driver level
# 2. inject_provider_label() — regex-based label injection scopes node patterns
# 3. _serialize_graph() — post-query filter drops nodes without the provider label
#
# Layer 2 is best-effort (regex can't fully parse Cypher);
# layer 3 is the safety net that guarantees provider isolation.
validate_custom_query(cypher)
cypher = inject_provider_label(cypher, provider_id)
try:
graph = graph_database.execute_read_query(
@@ -187,7 +208,10 @@ def get_cartography_schema(
with graph_database.get_session(
database_name, default_access_mode=neo4j.READ_ACCESS
) as session:
result = session.run(get_cartography_schema_query(provider_id))
result = session.run(
CARTOGRAPHY_SCHEMA_METADATA,
{"provider_id": provider_id},
)
record = result.single()
except graph_database.GraphDatabaseQueryException as exc:
logger.error(f"Cartography schema query failed: {exc}")
@@ -231,12 +255,10 @@ def _truncate_graph(graph: dict[str, Any]) -> dict[str, Any]:
def _serialize_graph(graph, provider_id: str) -> dict[str, Any]:
provider_label = get_provider_label(provider_id)
nodes = []
kept_node_ids = set()
for node in graph.nodes:
if provider_label not in node.labels:
if node._properties.get(PROVIDER_ID_PROPERTY) != provider_id:
continue
kept_node_ids.add(node.element_id)
@@ -251,11 +273,14 @@ def _serialize_graph(graph, provider_id: str) -> dict[str, Any]:
filtered_count = len(graph.nodes) - len(nodes)
if filtered_count > 0:
logger.debug(
f"Filtered {filtered_count} nodes without provider label {provider_label}"
f"Filtered {filtered_count} nodes without matching provider_id={provider_id}"
)
relationships = []
for relationship in graph.relationships:
if relationship._properties.get(PROVIDER_ID_PROPERTY) != provider_id:
continue
if (
relationship.start_node.element_id not in kept_node_ids
or relationship.end_node.element_id not in kept_node_ids
+26 -17
View File
@@ -1,10 +1,10 @@
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from rest_framework import permissions
from rest_framework.exceptions import NotAuthenticated
from rest_framework.filters import SearchFilter
from rest_framework.permissions import SAFE_METHODS
from rest_framework.response import Response
from rest_framework_json_api import filters
from rest_framework_json_api.views import ModelViewSet
@@ -12,7 +12,7 @@ from api.authentication import CombinedJWTOrAPIKeyAuthentication
from api.db_router import MainRouter, reset_read_db_alias, set_read_db_alias
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
from api.filters import CustomDjangoFilterBackend
from api.models import Role, UserRoleRelationship
from api.models import Role, Tenant
from api.rbac.permissions import HasPermissions
@@ -113,22 +113,27 @@ class BaseTenantViewset(BaseViewSet):
if request is not None:
request.db_alias = self.db_alias
if request.method == "POST":
with transaction.atomic(using=MainRouter.admin_db):
tenant = super().dispatch(request, *args, **kwargs)
if isinstance(tenant, Response) and tenant.status_code == 201:
self._create_admin_role(tenant.data["id"])
return tenant
else:
with transaction.atomic(using=self.db_alias):
return super().dispatch(request, *args, **kwargs)
with transaction.atomic(using=self.db_alias):
tenant = super().dispatch(request, *args, **kwargs)
try:
# If the request is a POST, create the admin role
if request.method == "POST":
isinstance(tenant, dict) and self._create_admin_role(
tenant.data["id"]
)
except Exception as e:
self._handle_creation_error(e, tenant)
raise
return tenant
finally:
if alias_token is not None:
reset_read_db_alias(alias_token)
self.db_alias = MainRouter.default_db
def _create_admin_role(self, tenant_id):
admin_role = Role.objects.using(MainRouter.admin_db).create(
Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant_id,
manage_users=True,
@@ -139,11 +144,15 @@ class BaseTenantViewset(BaseViewSet):
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=self.request.user,
role=admin_role,
tenant_id=tenant_id,
)
def _handle_creation_error(self, error, tenant):
if tenant.data.get("id"):
try:
Tenant.objects.using(MainRouter.admin_db).filter(
id=tenant.data["id"]
).delete()
except ObjectDoesNotExist:
pass # Tenant might not exist, handle gracefully
def initial(self, request, *args, **kwargs):
if request.auth is None:
+18 -172
View File
@@ -15,7 +15,6 @@ from django_filters.rest_framework import (
from rest_framework_json_api.django_filters.backends import DjangoFilterBackend
from rest_framework_json_api.serializers import ValidationError
from api.constants import SEVERITY_ORDER
from api.db_utils import (
FindingDeltaEnumField,
InvitationStateEnumField,
@@ -44,7 +43,6 @@ from api.models import (
ProviderGroup,
ProviderSecret,
Resource,
ResourceFindingMapping,
ResourceTag,
Role,
Scan,
@@ -198,13 +196,17 @@ class CommonFindingFilters(FilterSet):
field_name="resource_services", lookup_expr="icontains"
)
resource_uid = CharFilter(method="filter_resource_uid")
resource_uid__in = CharInFilter(method="filter_resource_uid_in")
resource_uid__icontains = CharFilter(method="filter_resource_uid_icontains")
resource_uid = CharFilter(field_name="resources__uid")
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
resource_uid__icontains = CharFilter(
field_name="resources__uid", lookup_expr="icontains"
)
resource_name = CharFilter(method="filter_resource_name")
resource_name__in = CharInFilter(method="filter_resource_name_in")
resource_name__icontains = CharFilter(method="filter_resource_name_icontains")
resource_name = CharFilter(field_name="resources__name")
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
resource_name__icontains = CharFilter(
field_name="resources__name", lookup_expr="icontains"
)
resource_type = CharFilter(method="filter_resource_type")
resource_type__in = CharInFilter(field_name="resource_types", lookup_expr="overlap")
@@ -262,52 +264,6 @@ class CommonFindingFilters(FilterSet):
)
return queryset.filter(overall_query).distinct()
def filter_check_title_icontains(self, queryset, name, value):
# Resolve from the summary table (has check_title column + trigram
# GIN index) instead of scanning JSON in the findings table.
matching_check_ids = (
FindingGroupDailySummary.objects.filter(
check_title__icontains=value,
)
.values_list("check_id", flat=True)
.distinct()
)
return queryset.filter(check_id__in=matching_check_ids)
# --- Resource subquery filters ---
# Resolve resource → ResourceFindingMapping → finding_ids first, then
# filter findings by id__in (see the schematic sketch after these
# filter methods). This avoids a 3-way JOIN driven from the (huge)
# findings side and lets PostgreSQL start from the resources
# unique-constraint index instead.
@staticmethod
def _finding_ids_for_resources(**lookup):
return ResourceFindingMapping.objects.filter(
resource__in=Resource.objects.filter(**lookup).values("id")
).values("finding_id")
def filter_resource_uid(self, queryset, name, value):
return queryset.filter(id__in=self._finding_ids_for_resources(uid=value))
def filter_resource_uid_in(self, queryset, name, value):
return queryset.filter(id__in=self._finding_ids_for_resources(uid__in=value))
def filter_resource_uid_icontains(self, queryset, name, value):
return queryset.filter(
id__in=self._finding_ids_for_resources(uid__icontains=value)
)
def filter_resource_name(self, queryset, name, value):
return queryset.filter(id__in=self._finding_ids_for_resources(name=value))
def filter_resource_name_in(self, queryset, name, value):
return queryset.filter(id__in=self._finding_ids_for_resources(name__in=value))
def filter_resource_name_icontains(self, queryset, name, value):
return queryset.filter(
id__in=self._finding_ids_for_resources(name__icontains=value)
)
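Schematically, the two query shapes compare as follows; a hedged ORM sketch, with a hypothetical uid value and the model names imported in this module:

# Join-driven shape: a plain related-field filter drives a 3-way JOIN
# from the findings table.
qs_join = Finding.objects.filter(resources__uid="arn:aws:s3:::example-bucket")

# Subquery-driven shape used by the filter methods above: resolve the
# finding ids from the resources side first, then filter by id__in so
# the planner can start at the resources unique-constraint index.
finding_ids = ResourceFindingMapping.objects.filter(
    resource__in=Resource.objects.filter(uid="arn:aws:s3:::example-bucket").values("id")
).values("finding_id")
qs_subquery = Finding.objects.filter(id__in=finding_ids)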
class TenantFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
@@ -434,7 +390,6 @@ class ScanFilter(ProviderRelationshipFilterSet):
class Meta:
model = Scan
fields = {
"id": ["exact", "in"],
"provider": ["exact", "in"],
"name": ["exact", "icontains"],
"started_at": ["gte", "lte"],
@@ -848,15 +803,11 @@ class FindingGroupFilter(CommonFindingFilters):
check_id = CharFilter(field_name="check_id", lookup_expr="exact")
check_id__in = CharInFilter(field_name="check_id", lookup_expr="in")
check_id__icontains = CharFilter(field_name="check_id", lookup_expr="icontains")
check_title__icontains = CharFilter(method="filter_check_title_icontains")
scan = UUIDFilter(field_name="scan_id", lookup_expr="exact")
scan__in = UUIDInFilter(field_name="scan_id", lookup_expr="in")
class Meta:
model = Finding
fields = {
"check_id": ["exact", "in", "icontains"],
"scan": ["exact", "in"],
}
def filter_queryset(self, queryset):
@@ -944,31 +895,15 @@ class LatestFindingGroupFilter(CommonFindingFilters):
check_id = CharFilter(field_name="check_id", lookup_expr="exact")
check_id__in = CharInFilter(field_name="check_id", lookup_expr="in")
check_id__icontains = CharFilter(field_name="check_id", lookup_expr="icontains")
check_title__icontains = CharFilter(method="filter_check_title_icontains")
scan = UUIDFilter(field_name="scan_id", lookup_expr="exact")
scan__in = UUIDInFilter(field_name="scan_id", lookup_expr="in")
class Meta:
model = Finding
fields = {
"check_id": ["exact", "in", "icontains"],
"scan": ["exact", "in"],
}
class _CheckTitleToCheckIdMixin:
"""Resolve check_title search to check_ids so all provider rows are kept."""
def filter_check_title_to_check_ids(self, queryset, name, value):
matching_check_ids = (
queryset.filter(check_title__icontains=value)
.values_list("check_id", flat=True)
.distinct()
)
return queryset.filter(check_id__in=matching_check_ids)
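The docstring's point, illustrated: a direct check_title__icontains filter keeps only rows whose own stored title matches, while resolving to check_ids first keeps every row sharing those check_ids. A hypothetical sketch assuming a summary queryset qs; whether sibling rows can carry differing titles depends on how the summaries are written:

# Direct filter: only rows whose own title matches survive.
direct = qs.filter(check_title__icontains="public")

# Two-step resolution used above: keep all rows for the matching check_ids.
check_ids = (
    qs.filter(check_title__icontains="public")
    .values_list("check_id", flat=True)
    .distinct()
)
resolved = qs.filter(check_id__in=check_ids)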
class FindingGroupSummaryFilter(_CheckTitleToCheckIdMixin, FilterSet):
class FindingGroupSummaryFilter(FilterSet):
"""
Filter for FindingGroupDailySummary queries.
@@ -991,7 +926,9 @@ class FindingGroupSummaryFilter(_CheckTitleToCheckIdMixin, FilterSet):
check_id = CharFilter(field_name="check_id", lookup_expr="exact")
check_id__in = CharInFilter(field_name="check_id", lookup_expr="in")
check_id__icontains = CharFilter(field_name="check_id", lookup_expr="icontains")
check_title__icontains = CharFilter(method="filter_check_title_to_check_ids")
check_title__icontains = CharFilter(
field_name="check_title", lookup_expr="icontains"
)
# Provider filters
provider_id = UUIDFilter(field_name="provider_id", lookup_expr="exact")
@@ -1079,7 +1016,7 @@ class FindingGroupSummaryFilter(_CheckTitleToCheckIdMixin, FilterSet):
return dt
class LatestFindingGroupSummaryFilter(_CheckTitleToCheckIdMixin, FilterSet):
class LatestFindingGroupSummaryFilter(FilterSet):
"""
Filter for FindingGroupDailySummary /latest endpoint.
@@ -1091,7 +1028,9 @@ class LatestFindingGroupSummaryFilter(_CheckTitleToCheckIdMixin, FilterSet):
check_id = CharFilter(field_name="check_id", lookup_expr="exact")
check_id__in = CharInFilter(field_name="check_id", lookup_expr="in")
check_id__icontains = CharFilter(field_name="check_id", lookup_expr="icontains")
check_title__icontains = CharFilter(method="filter_check_title_to_check_ids")
check_title__icontains = CharFilter(
field_name="check_title", lookup_expr="icontains"
)
# Provider filters
provider_id = UUIDFilter(field_name="provider_id", lookup_expr="exact")
@@ -1109,99 +1048,6 @@ class LatestFindingGroupSummaryFilter(_CheckTitleToCheckIdMixin, FilterSet):
}
class FindingGroupAggregatedComputedFilter(FilterSet):
"""Filter aggregated finding-group rows by computed status/severity/muted."""
STATUS_CHOICES = (
("FAIL", "Fail"),
("PASS", "Pass"),
("MANUAL", "Manual"),
)
status = ChoiceFilter(method="filter_status", choices=STATUS_CHOICES)
status__in = CharInFilter(method="filter_status_in", lookup_expr="in")
severity = ChoiceFilter(method="filter_severity", choices=SeverityChoices)
severity__in = CharInFilter(method="filter_severity_in", lookup_expr="in")
muted = BooleanFilter(field_name="muted")
include_muted = BooleanFilter(method="filter_include_muted")
def filter_status(self, queryset, name, value):
return queryset.filter(aggregated_status=value)
def filter_status_in(self, queryset, name, value):
values = value
if isinstance(value, str):
values = [part.strip() for part in value.split(",") if part.strip()]
allowed = {choice[0] for choice in self.STATUS_CHOICES}
invalid = [
status_value for status_value in values if status_value not in allowed
]
if invalid:
raise ValidationError(
[
{
"detail": f"invalid status filter: {invalid[0]}",
"status": "400",
"source": {"pointer": "/data"},
"code": "invalid",
}
]
)
if not values:
return queryset
return queryset.filter(aggregated_status__in=values)
def filter_severity(self, queryset, name, value):
severity_order = SEVERITY_ORDER.get(value)
if severity_order is None:
raise ValidationError(
[
{
"detail": f"invalid severity filter: {value}",
"status": "400",
"source": {"pointer": "/data"},
"code": "invalid",
}
]
)
return queryset.filter(severity_order=severity_order)
def filter_severity_in(self, queryset, name, value):
values = value
if isinstance(value, str):
values = [part.strip() for part in value.split(",") if part.strip()]
orders = []
for severity_value in values:
severity_order = SEVERITY_ORDER.get(severity_value)
if severity_order is None:
raise ValidationError(
[
{
"detail": f"invalid severity filter: {severity_value}",
"status": "400",
"source": {"pointer": "/data"},
"code": "invalid",
}
]
)
orders.append(severity_order)
if not orders:
return queryset
return queryset.filter(severity_order__in=orders)
def filter_include_muted(self, queryset, name, value):
if value is True:
return queryset
# include_muted=false: exclude fully-muted groups
return queryset.exclude(muted=True)
class ProviderSecretFilter(FilterSet):
inserted_at = DateFilter(
field_name="inserted_at",
@@ -1,49 +0,0 @@
from django.db import migrations
TASK_NAME = "attack-paths-cleanup-stale-scans"
INTERVAL_HOURS = 1
def create_periodic_task(apps, schema_editor):
IntervalSchedule = apps.get_model("django_celery_beat", "IntervalSchedule")
PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
schedule, _ = IntervalSchedule.objects.get_or_create(
every=INTERVAL_HOURS,
period="hours",
)
PeriodicTask.objects.update_or_create(
name=TASK_NAME,
defaults={
"task": TASK_NAME,
"interval": schedule,
"enabled": True,
},
)
def delete_periodic_task(apps, schema_editor):
IntervalSchedule = apps.get_model("django_celery_beat", "IntervalSchedule")
PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
PeriodicTask.objects.filter(name=TASK_NAME).delete()
# Clean up the schedule if no other task references it
IntervalSchedule.objects.filter(
every=INTERVAL_HOURS,
period="hours",
periodictask__isnull=True,
).delete()
class Migration(migrations.Migration):
dependencies = [
("api", "0085_finding_group_daily_summary_trgm_indexes"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
migrations.RunPython(create_periodic_task, delete_periodic_task),
]
@@ -1,40 +0,0 @@
from django.db import migrations
import api.db_utils
class Migration(migrations.Migration):
dependencies = [
("api", "0086_attack_paths_cleanup_periodic_task"),
]
operations = [
migrations.AlterField(
model_name="provider",
name="provider",
field=api.db_utils.ProviderEnumField(
choices=[
("aws", "AWS"),
("azure", "Azure"),
("gcp", "GCP"),
("kubernetes", "Kubernetes"),
("m365", "M365"),
("github", "GitHub"),
("mongodbatlas", "MongoDB Atlas"),
("iac", "IaC"),
("oraclecloud", "Oracle Cloud Infrastructure"),
("alibabacloud", "Alibaba Cloud"),
("cloudflare", "Cloudflare"),
("openstack", "OpenStack"),
("image", "Image"),
("googleworkspace", "Google Workspace"),
("vercel", "Vercel"),
],
default="aws",
),
),
migrations.RunSQL(
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'vercel';",
reverse_sql=migrations.RunSQL.noop,
),
]
@@ -1,95 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0087_vercel_provider"),
]
operations = [
migrations.AddField(
model_name="findinggroupdailysummary",
name="manual_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="pass_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="fail_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="manual_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="muted",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="new_fail_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="new_fail_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="new_pass_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="new_pass_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="new_manual_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="new_manual_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="changed_fail_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="changed_fail_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="changed_pass_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="changed_pass_muted_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="changed_manual_count",
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name="findinggroupdailysummary",
name="changed_manual_muted_count",
field=models.IntegerField(default=0),
),
]
@@ -1,31 +0,0 @@
from django.db import migrations
from tasks.tasks import backfill_finding_group_summaries_task
from api.db_router import MainRouter
from api.rls import Tenant
def trigger_backfill_task(apps, schema_editor):
"""
Re-dispatch the finding-group backfill task for every tenant so the new
`manual_count` and `muted` columns added in 0088 get populated from the
last 10 days of completed scans.
The aggregator (`aggregate_finding_group_summaries`) recomputes every
column on each call, so it back-populates the new fields without touching
the existing ones beyond a normal upsert.
"""
tenant_ids = Tenant.objects.using(MainRouter.admin_db).values_list("id", flat=True)
for tenant_id in tenant_ids:
backfill_finding_group_summaries_task.delay(tenant_id=str(tenant_id), days=10)
class Migration(migrations.Migration):
dependencies = [
("api", "0088_finding_group_status_muted_fields"),
]
operations = [
migrations.RunPython(trigger_backfill_task, migrations.RunPython.noop),
]
@@ -1,23 +0,0 @@
from django.db import migrations
TASK_NAME = "attack-paths-cleanup-stale-scans"
def set_cleanup_priority(apps, schema_editor):
PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
PeriodicTask.objects.filter(name=TASK_NAME).update(priority=0)
def unset_cleanup_priority(apps, schema_editor):
PeriodicTask = apps.get_model("django_celery_beat", "PeriodicTask")
PeriodicTask.objects.filter(name=TASK_NAME).update(priority=None)
class Migration(migrations.Migration):
dependencies = [
("api", "0089_backfill_finding_group_status_muted"),
]
operations = [
migrations.RunPython(set_cleanup_priority, unset_cleanup_priority),
]
+3 -43
View File
@@ -4,11 +4,11 @@ import re
from datetime import datetime, timedelta, timezone
from uuid import UUID, uuid4
import defusedxml
from allauth.socialaccount.models import SocialApp
from config.custom_logging import BackendLogger
from config.settings.social_login import SOCIALACCOUNT_PROVIDERS
from cryptography.fernet import Fernet, InvalidToken
import defusedxml
from defusedxml import ElementTree as ET
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
@@ -295,7 +295,6 @@ class Provider(RowLevelSecurityProtectedModel):
OPENSTACK = "openstack", _("OpenStack")
IMAGE = "image", _("Image")
GOOGLEWORKSPACE = "googleworkspace", _("Google Workspace")
VERCEL = "vercel", _("Vercel")
@staticmethod
def validate_aws_uid(value):
@@ -439,15 +438,6 @@ class Provider(RowLevelSecurityProtectedModel):
pointer="/data/attributes/uid",
)
@staticmethod
def validate_vercel_uid(value):
if not re.match(r"^team_[a-zA-Z0-9]{16,32}$", value):
raise ModelValidationError(
detail="Vercel provider ID must be a valid Vercel Team ID (e.g., team_xxxxxxxxxxxxxxxxxxxxxxxx).",
code="vercel-uid",
pointer="/data/attributes/uid",
)
@staticmethod
def validate_image_uid(value):
if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9._/:@-]{2,249}$", value):
@@ -1748,45 +1738,15 @@ class FindingGroupDailySummary(RowLevelSecurityProtectedModel):
# Severity stored as integer for MAX aggregation (5=critical, 4=high, etc.)
severity_order = models.SmallIntegerField(default=1)
# Finding counts (inclusive of muted findings; use the `muted` flag to
# tell whether the group has any actionable findings).
# Finding counts
pass_count = models.IntegerField(default=0)
fail_count = models.IntegerField(default=0)
manual_count = models.IntegerField(default=0)
muted_count = models.IntegerField(default=0)
# Status counts restricted to muted findings, so clients can isolate the
# muted half of each status (e.g. `pass_count - pass_muted_count` gives the
# actionable PASS findings).
pass_muted_count = models.IntegerField(default=0)
fail_muted_count = models.IntegerField(default=0)
manual_muted_count = models.IntegerField(default=0)
# Whether every finding for this (provider, check, day) is muted.
muted = models.BooleanField(default=False)
# Delta counts (non-muted, kept for convenience and as a "total" view).
# Delta counts
new_count = models.IntegerField(default=0)
changed_count = models.IntegerField(default=0)
# Delta breakdown by (status, muted) so clients can answer questions like
# "how many new failing findings appeared in this scan?" without scanning
# the underlying findings table. Mirrors the existing pass/fail/manual
# naming, with `_muted_count` siblings tracking the muted half of each
# bucket explicitly.
new_fail_count = models.IntegerField(default=0)
new_fail_muted_count = models.IntegerField(default=0)
new_pass_count = models.IntegerField(default=0)
new_pass_muted_count = models.IntegerField(default=0)
new_manual_count = models.IntegerField(default=0)
new_manual_muted_count = models.IntegerField(default=0)
changed_fail_count = models.IntegerField(default=0)
changed_fail_muted_count = models.IntegerField(default=0)
changed_pass_count = models.IntegerField(default=0)
changed_pass_muted_count = models.IntegerField(default=0)
changed_manual_count = models.IntegerField(default=0)
changed_manual_muted_count = models.IntegerField(default=0)
# Resource counts
resources_fail = models.IntegerField(default=0)
resources_total = models.IntegerField(default=0)
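Taken together, the count columns make the actionable slice of any bucket a plain subtraction; a minimal worked example with hypothetical values (field names as defined above):

# Hypothetical values for one (provider, check, day) summary row.
fail_count = 12                # all FAIL findings, muted included
fail_muted_count = 5           # the muted half of the FAIL bucket
actionable_fail = fail_count - fail_muted_count              # 7

new_fail_count = 4             # FAIL findings first seen in this scan
new_fail_muted_count = 1
actionable_new_fail = new_fail_count - new_fail_muted_count  # 3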
+7 -19
View File
@@ -1,7 +1,7 @@
from enum import Enum
from typing import Optional
from django.db.models import QuerySet
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import BasePermission
from api.db_router import MainRouter
@@ -29,17 +29,8 @@ class HasPermissions(BasePermission):
if not required_permissions:
return True
tenant_id = getattr(request, "tenant_id", None)
if not tenant_id:
tenant_id = request.auth.get("tenant_id") if request.auth else None
if not tenant_id:
return False
user_roles = (
User.objects.using(MainRouter.admin_db)
.get(id=request.user.id)
.roles.using(MainRouter.admin_db)
.filter(tenant_id=tenant_id)
User.objects.using(MainRouter.admin_db).get(id=request.user.id).roles.all()
)
if not user_roles:
return False
@@ -51,17 +42,14 @@ class HasPermissions(BasePermission):
return True
def get_role(user: User, tenant_id: str) -> Role:
def get_role(user: User) -> Optional[Role]:
"""
Retrieve the role assigned to the given user in the specified tenant.
Retrieve the first role assigned to the given user.
Raises:
PermissionDenied: If the user has no role in the given tenant.
Returns:
The user's first Role instance if the user has any roles, otherwise None.
"""
role = user.roles.using(MainRouter.admin_db).filter(tenant_id=tenant_id).first()
if role is None:
raise PermissionDenied("User has no role in this tenant.")
return role
return user.roles.first()
def get_providers(role: Role) -> QuerySet[Provider]:
+1 -1
View File
@@ -61,7 +61,7 @@ def revoke_membership_api_keys(sender, instance, **kwargs): # noqa: F841
in that tenant should be revoked to prevent further access.
"""
TenantAPIKey.objects.filter(
entity_id=instance.user_id, tenant_id=instance.tenant_id
entity=instance.user, tenant_id=instance.tenant.id
).update(revoked=True)
+1 -133
View File
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.25.5
version: 1.23.1
description: |-
Prowler API specification.
@@ -372,7 +372,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -388,7 +387,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -411,7 +409,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -429,7 +426,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -1355,7 +1351,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -1371,7 +1366,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -1833,7 +1827,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -1849,7 +1842,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -1872,7 +1864,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -1890,7 +1881,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -2439,7 +2429,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -2455,7 +2444,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -2478,7 +2466,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -2496,7 +2483,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -2953,7 +2939,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -2969,7 +2954,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -2992,7 +2976,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -3010,7 +2993,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -3465,7 +3447,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -3481,7 +3462,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -3504,7 +3484,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -3522,7 +3501,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -3965,7 +3943,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -3981,7 +3958,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -4004,7 +3980,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -4022,7 +3997,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -5806,7 +5780,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -5822,7 +5795,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -5845,7 +5817,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -5863,7 +5834,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- name: filter[search]
@@ -5985,7 +5955,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -6001,7 +5970,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -6024,7 +5992,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -6042,7 +6009,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- name: filter[search]
@@ -6153,7 +6119,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -6169,7 +6134,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -6191,7 +6155,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -6209,7 +6172,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- name: filter[search]
@@ -6352,7 +6314,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -6368,7 +6329,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -6391,7 +6351,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -6409,7 +6368,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -6565,7 +6523,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -6581,7 +6538,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -6604,7 +6560,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -6622,7 +6577,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -6772,7 +6726,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -6788,7 +6741,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -6810,7 +6762,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -6828,7 +6779,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- name: filter[search]
@@ -7020,7 +6970,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -7036,7 +6985,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -7059,7 +7007,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -7077,7 +7024,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -7198,7 +7144,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -7214,7 +7159,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -7237,7 +7181,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -7255,7 +7198,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -7400,7 +7342,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -7416,7 +7357,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -7439,7 +7379,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -7457,7 +7396,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -8243,7 +8181,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -8259,7 +8196,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider__in]
schema:
@@ -8282,7 +8218,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -8300,7 +8235,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -8323,7 +8257,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -8339,7 +8272,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -8362,7 +8294,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -8380,7 +8311,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- name: filter[search]
@@ -9050,7 +8980,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -9066,7 +8995,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -9089,7 +9017,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -9107,7 +9034,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -9601,7 +9527,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -9617,7 +9542,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -9640,7 +9564,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -9658,7 +9581,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -9965,7 +9887,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -9981,7 +9902,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -10004,7 +9924,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -10022,7 +9941,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -10335,7 +10253,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -10351,7 +10268,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -10374,7 +10290,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -10392,7 +10307,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -11215,7 +11129,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
* `aws` - AWS
* `azure` - Azure
@@ -11231,7 +11144,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
- in: query
name: filter[provider_type__in]
schema:
@@ -11254,7 +11166,6 @@ paths:
- mongodbatlas
- openstack
- oraclecloud
- vercel
description: |-
Multiple values may be separated by commas.
@@ -11272,7 +11183,6 @@ paths:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
explode: false
style: form
- in: query
@@ -18553,15 +18463,6 @@ components:
required:
- clouds_yaml_content
- clouds_yaml_cloud
- type: object
title: Vercel API Token
properties:
api_token:
type: string
description: Vercel API token for authentication. Can be scoped
to a specific team.
required:
- api_token
writeOnly: true
required:
- secret
@@ -19564,7 +19465,6 @@ components:
- openstack
- image
- googleworkspace
- vercel
type: string
description: |-
* `aws` - AWS
@@ -19581,7 +19481,6 @@ components:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
x-spec-enum-id: c0d56cad8ab9abe5
uid:
type: string
@@ -19702,7 +19601,6 @@ components:
- openstack
- image
- googleworkspace
- vercel
type: string
x-spec-enum-id: c0d56cad8ab9abe5
description: |-
@@ -19722,7 +19620,6 @@ components:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
uid:
type: string
title: Unique identifier for the provider, set by the provider
@@ -19774,7 +19671,6 @@ components:
- openstack
- image
- googleworkspace
- vercel
type: string
x-spec-enum-id: c0d56cad8ab9abe5
description: |-
@@ -19794,7 +19690,6 @@ components:
* `openstack` - OpenStack
* `image` - Image
* `googleworkspace` - Google Workspace
* `vercel` - Vercel
uid:
type: string
minLength: 3
@@ -20644,15 +20539,6 @@ components:
required:
- clouds_yaml_content
- clouds_yaml_cloud
- type: object
title: Vercel API Token
properties:
api_token:
type: string
description: Vercel API token for authentication. Can be scoped
to a specific team.
required:
- api_token
writeOnly: true
required:
- secret_type
@@ -21069,15 +20955,6 @@ components:
required:
- clouds_yaml_content
- clouds_yaml_cloud
- type: object
title: Vercel API Token
properties:
api_token:
type: string
description: Vercel API token for authentication. Can be scoped
to a specific team.
required:
- api_token
writeOnly: true
required:
- secret_type
@@ -21504,15 +21381,6 @@ components:
required:
- clouds_yaml_content
- clouds_yaml_cloud
- type: object
title: Vercel API Token
properties:
api_token:
type: string
description: Vercel API token for authentication. Can be scoped
to a specific team.
required:
- api_token
writeOnly: true
required:
- secret
@@ -215,21 +215,6 @@ class TestTokenSwitchTenant:
tenant_id = tenants_fixture[0].id
user_instance = User.objects.get(email=test_user)
Membership.objects.create(user=user_instance, tenant_id=tenant_id)
# Assign an admin role in the target tenant so the user can access resources
target_role = Role.objects.create(
name="admin",
tenant_id=tenant_id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.create(
user=user_instance, role=target_role, tenant_id=tenant_id
)
# Check that using our new user's credentials we can authenticate and get the providers
access_token, _ = get_api_tokens(client, test_user, test_password)
+97 -24
View File
@@ -11,7 +11,7 @@ from api.attack_paths import database as graph_database
from api.attack_paths import views_helpers
from tasks.jobs.attack_paths.config import (
PROVIDER_ELEMENT_ID_PROPERTY,
get_provider_label,
PROVIDER_ID_PROPERTY,
)
@@ -53,7 +53,7 @@ def test_prepare_parameters_includes_provider_and_casts(
)
assert result["provider_uid"] == "123456789012"
assert "provider_id" not in result
assert result["provider_id"] == "test-provider-id"
assert result["limit"] == 5
@@ -107,12 +107,12 @@ def test_execute_query_serializes_graph(
parameters = {"provider_uid": "123"}
provider_id = "test-provider-123"
plabel = get_provider_label(provider_id)
node = attack_paths_graph_stub_classes.Node(
element_id="node-1",
labels=["AWSAccount", plabel],
labels=["AWSAccount"],
properties={
"name": "account",
PROVIDER_ID_PROPERTY: provider_id,
"complex": {
"items": [
attack_paths_graph_stub_classes.NativeValue("value"),
@@ -121,13 +121,15 @@ def test_execute_query_serializes_graph(
},
},
)
node_2 = attack_paths_graph_stub_classes.Node("node-2", ["RDSInstance", plabel], {})
node_2 = attack_paths_graph_stub_classes.Node(
"node-2", ["RDSInstance"], {PROVIDER_ID_PROPERTY: provider_id}
)
relationship = attack_paths_graph_stub_classes.Relationship(
element_id="rel-1",
rel_type="OWNS",
start_node=node,
end_node=node_2,
properties={"weight": 1},
properties={"weight": 1, PROVIDER_ID_PROPERTY: provider_id},
)
graph = SimpleNamespace(nodes=[node, node_2], relationships=[relationship])
@@ -211,27 +213,29 @@ def test_execute_query_raises_permission_denied_on_read_only(
)
def test_serialize_graph_filters_by_provider_label(attack_paths_graph_stub_classes):
def test_serialize_graph_filters_by_provider_id(attack_paths_graph_stub_classes):
provider_id = "provider-keep"
plabel = get_provider_label(provider_id)
other_label = get_provider_label("provider-other")
node_keep = attack_paths_graph_stub_classes.Node("n1", ["AWSAccount", plabel], {})
node_keep = attack_paths_graph_stub_classes.Node(
"n1", ["AWSAccount"], {PROVIDER_ID_PROPERTY: provider_id}
)
node_drop = attack_paths_graph_stub_classes.Node(
"n2", ["AWSAccount", other_label], {}
"n2", ["AWSAccount"], {PROVIDER_ID_PROPERTY: "provider-other"}
)
rel_keep = attack_paths_graph_stub_classes.Relationship(
"r1", "OWNS", node_keep, node_keep, {}
"r1", "OWNS", node_keep, node_keep, {PROVIDER_ID_PROPERTY: provider_id}
)
rel_drop_by_provider = attack_paths_graph_stub_classes.Relationship(
"r2", "OWNS", node_keep, node_drop, {PROVIDER_ID_PROPERTY: "provider-other"}
)
# Relationship connecting a kept node to a dropped node — filtered by endpoint check
rel_drop_orphaned = attack_paths_graph_stub_classes.Relationship(
"r2", "OWNS", node_keep, node_drop, {}
"r3", "OWNS", node_keep, node_drop, {PROVIDER_ID_PROPERTY: provider_id}
)
graph = SimpleNamespace(
nodes=[node_keep, node_drop],
relationships=[rel_keep, rel_drop_orphaned],
relationships=[rel_keep, rel_drop_by_provider, rel_drop_orphaned],
)
result = views_helpers._serialize_graph(graph, provider_id)
@@ -350,6 +354,7 @@ def test_serialize_properties_filters_internal_fields():
"_module_name": "cartography:aws",
"_module_version": "0.98.0",
# Provider isolation
PROVIDER_ID_PROPERTY: "42",
PROVIDER_ELEMENT_ID_PROPERTY: "42:abc123",
}
@@ -444,11 +449,14 @@ def test_execute_custom_query_serializes_graph(
attack_paths_graph_stub_classes,
):
provider_id = "test-provider-123"
plabel = get_provider_label(provider_id)
node_1 = attack_paths_graph_stub_classes.Node("node-1", ["AWSAccount", plabel], {})
node_2 = attack_paths_graph_stub_classes.Node("node-2", ["RDSInstance", plabel], {})
node_1 = attack_paths_graph_stub_classes.Node(
"node-1", ["AWSAccount"], {PROVIDER_ID_PROPERTY: provider_id}
)
node_2 = attack_paths_graph_stub_classes.Node(
"node-2", ["RDSInstance"], {PROVIDER_ID_PROPERTY: provider_id}
)
relationship = attack_paths_graph_stub_classes.Relationship(
"rel-1", "OWNS", node_1, node_2, {}
"rel-1", "OWNS", node_1, node_2, {PROVIDER_ID_PROPERTY: provider_id}
)
graph_result = MagicMock()
@@ -463,11 +471,10 @@ def test_execute_custom_query_serializes_graph(
"db-tenant-test", "MATCH (n) RETURN n", provider_id
)
mock_execute.assert_called_once()
call_kwargs = mock_execute.call_args[1]
assert call_kwargs["database"] == "db-tenant-test"
# The cypher is rewritten with the provider label injection
assert plabel in call_kwargs["cypher"]
mock_execute.assert_called_once_with(
database="db-tenant-test",
cypher="MATCH (n) RETURN n",
)
assert len(result["nodes"]) == 2
assert result["relationships"][0]["label"] == "OWNS"
assert result["truncated"] is False
@@ -504,6 +511,72 @@ def test_execute_custom_query_wraps_graph_errors():
mock_logger.error.assert_called_once()
# -- validate_custom_query ------------------------------------------------
@pytest.mark.parametrize(
"cypher",
[
"LOAD CSV FROM 'http://169.254.169.254/' AS x RETURN x",
"load csv from 'http://evil.com' as row return row",
"CALL apoc.load.json('http://evil.com/') YIELD value RETURN value",
"CALL apoc.load.csvParams('http://evil.com/', {}, null) YIELD list RETURN list",
"CALL apoc.import.csv([{fileName: 'f'}], [], {}) YIELD node RETURN node",
"CALL apoc.export.csv.all('file.csv', {})",
"CALL apoc.cypher.run('CREATE (n)', {}) YIELD value RETURN value",
"CALL apoc.systemdb.graph() YIELD nodes RETURN nodes",
"CALL apoc.config.list() YIELD key, value RETURN key, value",
"CALL apoc.periodic.iterate('MATCH (n) RETURN n', 'DELETE n', {batchSize: 100})",
"CALL apoc.do.when(true, 'CREATE (n) RETURN n', '', {}) YIELD value RETURN value",
"CALL apoc.trigger.add('t', 'RETURN 1', {phase: 'before'})",
"CALL apoc.custom.asProcedure('myProc', 'RETURN 1')",
],
ids=[
"LOAD_CSV",
"LOAD_CSV_lowercase",
"apoc.load.json",
"apoc.load.csvParams",
"apoc.import.csv",
"apoc.export.csv",
"apoc.cypher.run",
"apoc.systemdb.graph",
"apoc.config.list",
"apoc.periodic.iterate",
"apoc.do.when",
"apoc.trigger.add",
"apoc.custom.asProcedure",
],
)
def test_validate_custom_query_rejects_blocked_patterns(cypher):
with pytest.raises(ValidationError) as exc:
views_helpers.validate_custom_query(cypher)
assert "blocked operation" in str(exc.value.detail)
@pytest.mark.parametrize(
"cypher",
[
"MATCH (n:AWSAccount) RETURN n LIMIT 10",
"MATCH (a)-[r]->(b) RETURN a, r, b",
"MATCH (n) WHERE n.name CONTAINS 'load' RETURN n",
"CALL apoc.create.vNode(['Label'], {}) YIELD node RETURN node",
"MATCH (n) WHERE n.name = 'apoc.load.json' RETURN n",
'MATCH (n) WHERE n.description = "LOAD CSV is cool" RETURN n',
],
ids=[
"simple_match",
"traversal",
"contains_load_substring",
"apoc_virtual_node",
"apoc_load_inside_single_quotes",
"load_csv_inside_double_quotes",
],
)
def test_validate_custom_query_allows_clean_queries(cypher):
views_helpers.validate_custom_query(cypher)
# -- _truncate_graph ----------------------------------------------------------
@@ -12,8 +12,6 @@ from unittest.mock import MagicMock, patch
import neo4j
import pytest
import api.attack_paths.database as db_module
class TestLazyInitialization:
"""Test that Neo4j driver is initialized lazily on first use."""
@@ -21,6 +19,8 @@ class TestLazyInitialization:
@pytest.fixture(autouse=True)
def reset_module_state(self):
"""Reset module-level singleton state before each test."""
import api.attack_paths.database as db_module
original_driver = db_module._driver
db_module._driver = None
@@ -31,6 +31,8 @@ class TestLazyInitialization:
def test_driver_not_initialized_at_import(self):
"""Driver should be None after module import (no eager connection)."""
import api.attack_paths.database as db_module
assert db_module._driver is None
@patch("api.attack_paths.database.settings")
@@ -39,6 +41,8 @@ class TestLazyInitialization:
self, mock_driver_factory, mock_settings
):
"""init_driver() should create connection only when called."""
import api.attack_paths.database as db_module
mock_driver = MagicMock()
mock_driver_factory.return_value = mock_driver
mock_settings.DATABASES = {
@@ -65,6 +69,8 @@ class TestLazyInitialization:
self, mock_driver_factory, mock_settings
):
"""Subsequent calls should return cached driver without reconnecting."""
import api.attack_paths.database as db_module
mock_driver = MagicMock()
mock_driver_factory.return_value = mock_driver
mock_settings.DATABASES = {
@@ -93,6 +99,8 @@ class TestLazyInitialization:
self, mock_driver_factory, mock_settings
):
"""get_driver() should use init_driver() for lazy initialization."""
import api.attack_paths.database as db_module
mock_driver = MagicMock()
mock_driver_factory.return_value = mock_driver
mock_settings.DATABASES = {
@@ -110,50 +118,14 @@ class TestLazyInitialization:
mock_driver_factory.assert_called_once()
class TestConnectionAcquisitionTimeout:
"""Test that the connection acquisition timeout is configurable."""
@pytest.fixture(autouse=True)
def reset_module_state(self):
original_driver = db_module._driver
original_timeout = db_module.CONN_ACQUISITION_TIMEOUT
db_module._driver = None
yield
db_module._driver = original_driver
db_module.CONN_ACQUISITION_TIMEOUT = original_timeout
@patch("api.attack_paths.database.settings")
@patch("api.attack_paths.database.neo4j.GraphDatabase.driver")
def test_driver_receives_configured_timeout(
self, mock_driver_factory, mock_settings
):
"""init_driver() should pass CONN_ACQUISITION_TIMEOUT to the neo4j driver."""
mock_driver_factory.return_value = MagicMock()
mock_settings.DATABASES = {
"neo4j": {
"HOST": "localhost",
"PORT": 7687,
"USER": "neo4j",
"PASSWORD": "password",
}
}
db_module.CONN_ACQUISITION_TIMEOUT = 42
db_module.init_driver()
_, kwargs = mock_driver_factory.call_args
assert kwargs["connection_acquisition_timeout"] == 42
class TestAtexitRegistration:
"""Test that atexit cleanup handler is registered correctly."""
@pytest.fixture(autouse=True)
def reset_module_state(self):
"""Reset module-level singleton state before each test."""
import api.attack_paths.database as db_module
original_driver = db_module._driver
db_module._driver = None
@@ -169,6 +141,8 @@ class TestAtexitRegistration:
self, mock_driver_factory, mock_atexit_register, mock_settings
):
"""atexit.register should be called on first initialization."""
import api.attack_paths.database as db_module
mock_driver_factory.return_value = MagicMock()
mock_settings.DATABASES = {
"neo4j": {
@@ -194,6 +168,8 @@ class TestAtexitRegistration:
The double-checked locking on _driver ensures the atexit registration
block only executes once (when _driver is first created).
"""
import api.attack_paths.database as db_module
mock_driver_factory.return_value = MagicMock()
mock_settings.DATABASES = {
"neo4j": {
@@ -218,6 +194,8 @@ class TestCloseDriver:
@pytest.fixture(autouse=True)
def reset_module_state(self):
"""Reset module-level singleton state before each test."""
import api.attack_paths.database as db_module
original_driver = db_module._driver
db_module._driver = None
@@ -228,6 +206,8 @@ class TestCloseDriver:
def test_close_driver_closes_and_clears_driver(self):
"""close_driver() should close the driver and set it to None."""
import api.attack_paths.database as db_module
mock_driver = MagicMock()
db_module._driver = mock_driver
@@ -238,6 +218,8 @@ class TestCloseDriver:
def test_close_driver_handles_none_driver(self):
"""close_driver() should handle case where driver is None."""
import api.attack_paths.database as db_module
db_module._driver = None
# Should not raise
@@ -247,6 +229,8 @@ class TestCloseDriver:
def test_close_driver_clears_driver_even_on_close_error(self):
"""Driver should be cleared even if close() raises an exception."""
import api.attack_paths.database as db_module
mock_driver = MagicMock()
mock_driver.close.side_effect = Exception("Connection error")
db_module._driver = mock_driver
@@ -262,6 +246,8 @@ class TestExecuteReadQuery:
"""Test read query execution helper."""
def test_execute_read_query_calls_read_session_and_returns_result(self):
import api.attack_paths.database as db_module
tx = MagicMock()
expected_graph = MagicMock()
run_result = MagicMock()
@@ -303,6 +289,8 @@ class TestExecuteReadQuery:
assert result is expected_graph
def test_execute_read_query_defaults_parameters_to_empty_dict(self):
import api.attack_paths.database as db_module
tx = MagicMock()
run_result = MagicMock()
run_result.graph.return_value = MagicMock()
@@ -337,6 +325,8 @@ class TestGetSessionReadOnly:
@pytest.fixture(autouse=True)
def reset_module_state(self):
import api.attack_paths.database as db_module
original_driver = db_module._driver
db_module._driver = None
yield
@@ -351,6 +341,8 @@ class TestGetSessionReadOnly:
)
def test_get_session_raises_write_query_not_allowed(self, neo4j_code):
"""Read-mode Neo4j errors should raise `WriteQueryNotAllowedException`."""
import api.attack_paths.database as db_module
mock_session = MagicMock()
neo4j_error = neo4j.exceptions.Neo4jError._hydrate_neo4j(
code=neo4j_code,
@@ -370,6 +362,8 @@ class TestGetSessionReadOnly:
def test_get_session_raises_generic_exception_for_other_errors(self):
"""Non-read-mode Neo4j errors should raise GraphDatabaseQueryException."""
import api.attack_paths.database as db_module
mock_session = MagicMock()
neo4j_error = neo4j.exceptions.Neo4jError._hydrate_neo4j(
code="Neo.ClientError.Statement.SyntaxError",
@@ -394,6 +388,8 @@ class TestThreadSafety:
@pytest.fixture(autouse=True)
def reset_module_state(self):
"""Reset module-level singleton state before each test."""
import api.attack_paths.database as db_module
original_driver = db_module._driver
db_module._driver = None
@@ -408,6 +404,8 @@ class TestThreadSafety:
self, mock_driver_factory, mock_settings
):
"""Multiple threads calling init_driver() should create only one driver."""
import api.attack_paths.database as db_module
mock_driver = MagicMock()
mock_driver_factory.return_value = mock_driver
mock_settings.DATABASES = {
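# The docstrings above name double-checked locking; a minimal self-contained
# sketch of that singleton shape (factory name hypothetical). The atexit hook
# is registered exactly once, inside the locked branch that creates the driver.
import atexit
import threading

_driver = None
_driver_lock = threading.Lock()

def _create_driver():
    # Stand-in for the real neo4j.GraphDatabase.driver(...) call.
    class _Driver:
        def close(self):
            pass

    return _Driver()

def init_driver():
    global _driver
    if _driver is None:              # fast path, no lock taken
        with _driver_lock:
            if _driver is None:      # re-check under the lock
                _driver = _create_driver()
                atexit.register(close_driver)  # runs once per process
    return _driver

def close_driver():
    global _driver
    if _driver is not None:
        try:
            _driver.close()
        finally:
            _driver = None           # cleared even if close() raises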
@@ -450,6 +448,8 @@ class TestHasProviderData:
"""Test has_provider_data helper for checking provider nodes in Neo4j."""
def test_returns_true_when_nodes_exist(self):
import api.attack_paths.database as db_module
mock_session = MagicMock()
mock_result = MagicMock()
mock_result.single.return_value = MagicMock() # non-None record
@@ -468,6 +468,8 @@ class TestHasProviderData:
mock_session.run.assert_called_once()
def test_returns_false_when_no_nodes(self):
import api.attack_paths.database as db_module
mock_session = MagicMock()
mock_result = MagicMock()
mock_result.single.return_value = None
@@ -484,6 +486,8 @@ class TestHasProviderData:
assert db_module.has_provider_data("db-tenant-abc", "provider-123") is False
def test_returns_false_when_database_not_found(self):
import api.attack_paths.database as db_module
session_ctx = MagicMock()
session_ctx.__enter__.side_effect = db_module.GraphDatabaseQueryException(
message="Database does not exist",
@@ -499,6 +503,8 @@ class TestHasProviderData:
)
def test_raises_on_other_errors(self):
import api.attack_paths.database as db_module
session_ctx = MagicMock()
session_ctx.__enter__.side_effect = db_module.GraphDatabaseQueryException(
message="Connection refused",
@@ -1,43 +0,0 @@
import pytest
from config.settings.celery import _build_celery_broker_url
class TestBuildCeleryBrokerUrl:
def test_without_credentials(self):
broker_url = _build_celery_broker_url("redis", "", "", "valkey", "6379", "0")
assert broker_url == "redis://valkey:6379/0"
def test_with_password_only(self):
broker_url = _build_celery_broker_url(
"rediss", "", "secret", "cache.example.com", "6379", "0"
)
assert broker_url == "rediss://:secret@cache.example.com:6379/0"
def test_with_username_and_password(self):
broker_url = _build_celery_broker_url(
"rediss", "default", "secret", "cache.example.com", "6379", "0"
)
assert broker_url == "rediss://default:secret@cache.example.com:6379/0"
def test_with_username_only(self):
broker_url = _build_celery_broker_url(
"redis", "admin", "", "valkey", "6379", "0"
)
assert broker_url == "redis://admin@valkey:6379/0"
def test_url_encodes_credentials(self):
broker_url = _build_celery_broker_url(
"rediss", "user@name", "p@ss:word", "cache.example.com", "6379", "0"
)
assert (
broker_url == "rediss://user%40name:p%40ss%3Aword@cache.example.com:6379/0"
)
def test_invalid_scheme_raises_error(self):
with pytest.raises(ValueError, match="Invalid VALKEY_SCHEME 'http'"):
_build_celery_broker_url("http", "", "", "valkey", "6379", "0")
@@ -1,429 +0,0 @@
"""Unit tests for the Cypher sanitizer (validation + provider-label injection)."""
from unittest.mock import patch
import pytest
from rest_framework.exceptions import ValidationError
from api.attack_paths.cypher_sanitizer import (
inject_provider_label,
validate_custom_query,
)
PROVIDER_ID = "019c41ee-7df3-7dec-a684-d839f95619f8"
LABEL = "_Provider_019c41ee7df37deca684d839f95619f8"
def _inject(cypher: str) -> str:
"""Shortcut that patches `get_provider_label` to avoid config imports."""
with patch(
"api.attack_paths.cypher_sanitizer.get_provider_label", return_value=LABEL
):
return inject_provider_label(cypher, PROVIDER_ID)
# ---------------------------------------------------------------------------
# Pass A - Labeled node patterns (all clauses)
# ---------------------------------------------------------------------------
class TestLabeledNodes:
def test_single_label(self):
result = _inject("MATCH (n:AWSRole) RETURN n")
assert f"(n:AWSRole:{LABEL})" in result
def test_label_with_properties(self):
result = _inject("MATCH (n:AWSRole {name: 'admin'}) RETURN n")
assert f"(n:AWSRole:{LABEL} {{name: 'admin'}})" in result
def test_multiple_labels(self):
result = _inject("MATCH (n:AWSRole:AWSPrincipal) RETURN n")
assert f"(n:AWSRole:AWSPrincipal:{LABEL})" in result
def test_anonymous_labeled(self):
result = _inject(
"MATCH (:AWSPrincipal {arn: 'ecs-tasks.amazonaws.com'}) RETURN 1"
)
assert f"(:AWSPrincipal:{LABEL} {{arn: 'ecs-tasks.amazonaws.com'}})" in result
def test_backtick_label(self):
result = _inject("MATCH (n:`My Label`) RETURN n")
assert f"(n:`My Label`:{LABEL})" in result
def test_labeled_in_where_clause(self):
"""Labeled nodes in WHERE (pattern existence) still get the label."""
result = _inject(
"MATCH (n:AWSRole) WHERE EXISTS((n)-[:REL]->(:Target)) RETURN n"
)
assert f"(n:AWSRole:{LABEL})" in result
assert f"(:Target:{LABEL})" in result
def test_labeled_in_return_clause(self):
"""Labeled nodes in RETURN still get the label (they're always node patterns)."""
result = _inject("MATCH (n:AWSRole) RETURN (n:AWSRole)")
assert result.count(f":AWSRole:{LABEL}") == 2
def test_labeled_in_optional_match(self):
result = _inject(
"OPTIONAL MATCH (pf:ProwlerFinding {status: 'FAIL'}) RETURN pf"
)
assert f"(pf:ProwlerFinding:{LABEL} {{status: 'FAIL'}})" in result
# ---------------------------------------------------------------------------
# Pass B - Bare node patterns (MATCH/OPTIONAL MATCH only)
# ---------------------------------------------------------------------------
class TestBareNodes:
def test_bare_in_match(self):
result = _inject("MATCH (a)-[:HAS_POLICY]->(b) RETURN a, b")
assert f"(a:{LABEL})" in result
assert f"(b:{LABEL})" in result
def test_bare_with_properties_in_match(self):
result = _inject("MATCH (n {name: 'x'}) RETURN n")
assert f"(n:{LABEL} {{name: 'x'}})" in result
def test_bare_in_optional_match(self):
result = _inject("OPTIONAL MATCH (n)-[r]-(m) RETURN n")
assert f"(n:{LABEL})" in result
assert f"(m:{LABEL})" in result
def test_bare_not_injected_in_return(self):
"""Bare (identifier) in RETURN could be expression grouping."""
cypher = "MATCH (n:AWSRole) RETURN (n)"
result = _inject(cypher)
# The labeled (n:AWSRole) gets the label, but the bare (n) in RETURN should not
assert f"(n:AWSRole:{LABEL})" in result
# Count how many times the label appears - should be 1 (from MATCH only)
assert result.count(LABEL) == 1
def test_bare_not_injected_in_where(self):
cypher = "MATCH (n:AWSRole) WHERE (n.x > 1) RETURN n"
result = _inject(cypher)
# (n.x > 1) is an expression group, not a node pattern - should be untouched
assert "(n.x > 1)" in result
def test_bare_not_injected_in_with(self):
cypher = "MATCH (n:AWSRole) WITH (n) RETURN n"
result = _inject(cypher)
assert result.count(LABEL) == 1
def test_bare_not_injected_in_unwind(self):
cypher = "UNWIND nodes(path) as n OPTIONAL MATCH (n)-[r]-(m) RETURN n"
result = _inject(cypher)
# (n) and (m) in OPTIONAL MATCH get injected, but nodes(path) in UNWIND does not
assert f"(n:{LABEL})" in result
assert f"(m:{LABEL})" in result
# ---------------------------------------------------------------------------
# Function call exclusion
# ---------------------------------------------------------------------------
class TestFunctionCallExclusion:
@pytest.mark.parametrize(
"func_call",
[
"collect(DISTINCT pf)",
"any(x IN stmt.action WHERE toLower(x) = 'iam:*')",
"toLower(action)",
"nodes(path)",
"count(n)",
"apoc.create.vNode(labels)",
"EXISTS(n.prop)",
"size(n.list)",
],
)
def test_function_calls_not_injected(self, func_call):
cypher = f"MATCH (n:AWSRole) WHERE {func_call} RETURN n"
result = _inject(cypher)
# The function call should remain unchanged
assert func_call in result
# Only the MATCH labeled node should get the label
assert result.count(LABEL) == 1
# ---------------------------------------------------------------------------
# String and comment protection
# ---------------------------------------------------------------------------
class TestProtection:
def test_string_with_fake_node_pattern(self):
cypher = "MATCH (n:AWSRole) WHERE n.name = '(fake:Label)' RETURN n"
result = _inject(cypher)
assert "'(fake:Label)'" in result
assert result.count(LABEL) == 1
def test_double_quoted_string(self):
cypher = 'MATCH (n:AWSRole) WHERE n.name = "(fake:Label)" RETURN n'
result = _inject(cypher)
assert '"(fake:Label)"' in result
assert result.count(LABEL) == 1
def test_line_comment_with_node_pattern(self):
cypher = "// (n:Fake)\nMATCH (n:AWSRole) RETURN n"
result = _inject(cypher)
assert "// (n:Fake)" in result
assert result.count(LABEL) == 1
def test_string_containing_double_slash(self):
"""Strings with // inside should be consumed as strings, not comments."""
cypher = "MATCH (n:AWSRole {url: 'https://example.com'}) RETURN n"
result = _inject(cypher)
assert "'https://example.com'" in result
assert f"(n:AWSRole:{LABEL}" in result
def test_escaped_quotes_in_string(self):
cypher = r"MATCH (n:AWSRole) WHERE n.name = 'it\'s a test' RETURN n"
result = _inject(cypher)
assert result.count(LABEL) == 1
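# These protections are typically implemented by masking quoted strings and
# // comments before any regex pass. A small sketch (placeholder scheme
# assumed): string alternatives are tried before the comment alternative at
# each position, so a // inside quotes is consumed as part of the string.
import re

_PROTECTED = re.compile(r"'(?:\\.|[^'\\])*'|\"(?:\\.|[^\"\\])*\"|//[^\n]*")

def mask_protected(cypher):
    saved = []

    def stash(match):
        saved.append(match.group(0))
        return f"\x00{len(saved) - 1}\x00"  # opaque placeholder token

    return _PROTECTED.sub(stash, cypher), saved

def unmask(cypher, saved):
    for index, original in enumerate(saved):
        cypher = cypher.replace(f"\x00{index}\x00", original)
    return cypher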
# ---------------------------------------------------------------------------
# Clause splitting
# ---------------------------------------------------------------------------
class TestClauseSplitting:
def test_case_insensitive_keywords(self):
cypher = "match (n:AWSRole) where n.x = 1 return n"
result = _inject(cypher)
assert f"(n:AWSRole:{LABEL})" in result
def test_optional_match_with_extra_whitespace(self):
cypher = "OPTIONAL MATCH (n:AWSRole) RETURN n"
result = _inject(cypher)
assert f"(n:AWSRole:{LABEL})" in result
def test_multiple_match_clauses(self):
cypher = (
"MATCH (a:AWSAccount)--(b:AWSRole) "
"MATCH (b)--(c:AWSPolicy) "
"RETURN a, b, c"
)
result = _inject(cypher)
assert f"(a:AWSAccount:{LABEL})" in result
assert f"(b:AWSRole:{LABEL})" in result
assert f"(c:AWSPolicy:{LABEL})" in result
# (b) in second MATCH is bare and gets injected
assert result.count(LABEL) == 4 # a, b (labeled), b (bare in 2nd MATCH), c
# ---------------------------------------------------------------------------
# Real-world query patterns from aws.py
# ---------------------------------------------------------------------------
class TestRealWorldQueries:
def test_basic_resource_query(self):
cypher = (
"MATCH path = (aws:AWSAccount {id: $provider_uid})--(rds:RDSInstance)\n"
"UNWIND nodes(path) as n\n"
"OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding {status: 'FAIL'})\n"
"RETURN path, collect(DISTINCT pf) as dpf"
)
result = _inject(cypher)
assert f"(aws:AWSAccount:{LABEL} {{id: $provider_uid}})" in result
assert f"(rds:RDSInstance:{LABEL})" in result
assert f"(n:{LABEL})" in result
assert f"(pf:ProwlerFinding:{LABEL} {{status: 'FAIL'}})" in result
assert "nodes(path)" in result # function call untouched
assert "collect(DISTINCT pf)" in result # function call untouched
def test_privilege_escalation_query(self):
cypher = (
"MATCH path_principal = (aws:AWSAccount {id: $uid})"
"--(principal:AWSPrincipal)--(pol:AWSPolicy)\n"
"WHERE pol.effect = 'Allow'\n"
"MATCH (principal)--(cfn_policy:AWSPolicy)"
"--(stmt_cfn:AWSPolicyStatement)\n"
"WHERE any(action IN stmt_cfn.action WHERE toLower(action) = 'iam:passrole')\n"
"MATCH path_target = (aws)--(target_role:AWSRole)"
"-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {arn: 'cloudformation.amazonaws.com'})\n"
"RETURN path_principal, path_target"
)
result = _inject(cypher)
assert f"(aws:AWSAccount:{LABEL} {{id: $uid}})" in result
assert f"(principal:AWSPrincipal:{LABEL})" in result
assert f"(pol:AWSPolicy:{LABEL})" in result
assert f"(principal:{LABEL})" in result # bare in 2nd MATCH
assert f"(cfn_policy:AWSPolicy:{LABEL})" in result
assert f"(stmt_cfn:AWSPolicyStatement:{LABEL})" in result
assert f"(aws:{LABEL})" in result # bare in 3rd MATCH
assert f"(target_role:AWSRole:{LABEL})" in result
assert (
f"(:AWSPrincipal:{LABEL} {{arn: 'cloudformation.amazonaws.com'}})" in result
)
# Function calls in WHERE untouched
assert "any(action IN" in result
assert "toLower(action)" in result
def test_custom_bare_query(self):
cypher = (
"MATCH (a)-[:HAS_POLICY]->(b)\n"
"WHERE a.name CONTAINS 'admin'\n"
"RETURN a, b"
)
result = _inject(cypher)
assert f"(a:{LABEL})" in result
assert f"(b:{LABEL})" in result
assert result.count(LABEL) == 2
def test_internet_via_path_connectivity(self):
"""Post-refactor pattern: Internet reached via CAN_ACCESS, not standalone."""
cypher = (
"MATCH path = (aws:AWSAccount {id: $provider_uid})--(ec2:EC2Instance)\n"
"WHERE ec2.exposed_internet = true\n"
"OPTIONAL MATCH (internet:Internet)-[can_access:CAN_ACCESS]->(ec2)\n"
"RETURN path, internet, can_access"
)
result = _inject(cypher)
assert f"(aws:AWSAccount:{LABEL}" in result
assert f"(ec2:EC2Instance:{LABEL})" in result
assert f"(internet:Internet:{LABEL})" in result
# ec2 in OPTIONAL MATCH is bare: Pass A (labeled patterns) skips it because
# it carries no label, so Pass B injects the provider label.
assert f"(ec2:{LABEL})" in result
# ---------------------------------------------------------------------------
# Edge cases
# ---------------------------------------------------------------------------
class TestEdgeCases:
def test_empty_query(self):
assert _inject("") == ""
def test_no_node_patterns(self):
cypher = "RETURN 1 + 2"
assert _inject(cypher) == cypher
def test_anonymous_empty_parens_not_injected(self):
"""Empty () in MATCH is extremely rare but should not be injected."""
cypher = "MATCH ()--(m:AWSRole) RETURN m"
result = _inject(cypher)
assert "()" in result # empty parens untouched
assert f"(m:AWSRole:{LABEL})" in result
def test_fully_anonymous_query_bypasses_injection(self):
"""All-anonymous patterns bypass injection entirely.
MATCH ()--()--() has no labels and no variables, so neither Pass A
(labeled) nor Pass B (bare identifier) can inject the provider label.
This is safe because _serialize_graph() (Layer 3) filters every
returned node by provider label, dropping cross-provider data before
it reaches the user.
"""
cypher = "MATCH ()--()--() RETURN *"
result = _inject(cypher)
assert result == cypher # completely unmodified
assert LABEL not in result
def test_relationship_patterns_untouched(self):
cypher = "MATCH (a:X)-[r:REL_TYPE {x: 1}]->(b:Y) RETURN a"
result = _inject(cypher)
assert "[r:REL_TYPE {x: 1}]" in result # relationship untouched
assert f"(a:X:{LABEL})" in result
assert f"(b:Y:{LABEL})" in result
def test_call_subquery(self):
cypher = (
"CALL {\n"
" MATCH (inner:AWSRole) RETURN inner\n"
"}\n"
"MATCH (outer:AWSAccount) RETURN outer, inner"
)
result = _inject(cypher)
assert f"(inner:AWSRole:{LABEL})" in result
assert f"(outer:AWSAccount:{LABEL})" in result
def test_multiple_protected_regions(self):
cypher = (
"MATCH (n:X {a: 'hello'}) " 'WHERE n.b = "world" ' "// comment\n" "RETURN n"
)
result = _inject(cypher)
assert "'hello'" in result
assert '"world"' in result
assert "// comment" in result
assert f"(n:X:{LABEL}" in result
def test_idempotent_on_already_injected(self):
"""Running injection twice should add the label twice (not ideal, but predictable)."""
first = _inject("MATCH (n:AWSRole) RETURN n")
second = _inject(first)
# The label appears twice (stacked)
assert second.count(LABEL) == 2
# ---------------------------------------------------------------------------
# Validation
# ---------------------------------------------------------------------------
class TestValidation:
@pytest.mark.parametrize(
"cypher",
[
"LOAD CSV FROM 'http://169.254.169.254/' AS x RETURN x",
"load csv from 'http://evil.com' as row return row",
"CALL apoc.load.json('http://evil.com/') YIELD value RETURN value",
"CALL apoc.load.csvParams('http://evil.com/', {}, null) YIELD list RETURN list",
"CALL apoc.import.csv([{fileName: 'f'}], [], {}) YIELD node RETURN node",
"CALL apoc.export.csv.all('file.csv', {})",
"CALL apoc.cypher.run('CREATE (n)', {}) YIELD value RETURN value",
"CALL apoc.systemdb.graph() YIELD nodes RETURN nodes",
"CALL apoc.config.list() YIELD key, value RETURN key, value",
"CALL apoc.periodic.iterate('MATCH (n) RETURN n', 'DELETE n', {batchSize: 100})",
"CALL apoc.do.when(true, 'CREATE (n) RETURN n', '', {}) YIELD value RETURN value",
"CALL apoc.trigger.add('t', 'RETURN 1', {phase: 'before'})",
"CALL apoc.custom.asProcedure('myProc', 'RETURN 1')",
],
ids=[
"LOAD_CSV",
"LOAD_CSV_lowercase",
"apoc.load.json",
"apoc.load.csvParams",
"apoc.import.csv",
"apoc.export.csv",
"apoc.cypher.run",
"apoc.systemdb.graph",
"apoc.config.list",
"apoc.periodic.iterate",
"apoc.do.when",
"apoc.trigger.add",
"apoc.custom.asProcedure",
],
)
def test_rejects_blocked_patterns(self, cypher):
with pytest.raises(ValidationError) as exc:
validate_custom_query(cypher)
assert "blocked operation" in str(exc.value.detail)
@pytest.mark.parametrize(
"cypher",
[
"MATCH (n:AWSAccount) RETURN n LIMIT 10",
"MATCH (a)-[r]->(b) RETURN a, r, b",
"MATCH (n) WHERE n.name CONTAINS 'load' RETURN n",
"CALL apoc.create.vNode(['Label'], {}) YIELD node RETURN node",
"MATCH (n) WHERE n.name = 'apoc.load.json' RETURN n",
'MATCH (n) WHERE n.description = "LOAD CSV is cool" RETURN n',
],
ids=[
"simple_match",
"traversal",
"contains_load_substring",
"apoc_virtual_node",
"apoc_load_inside_single_quotes",
"load_csv_inside_double_quotes",
],
)
def test_allows_clean_queries(self, cypher):
validate_custom_query(cypher)
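# On the validation side, a minimal blocklist in the same spirit (patterns
# condensed from the test ids above; the production list is presumably
# broader). Reuses mask_protected from the earlier sketch so quoted mentions
# of blocked names do not trigger a match.
import re

from rest_framework.exceptions import ValidationError

_BLOCKED = [
    re.compile(r"\bLOAD\s+CSV\b", re.IGNORECASE),
    re.compile(
        r"\bapoc\.(?:load|import|export|cypher|systemdb|config|periodic|do|trigger|custom)\.",
        re.IGNORECASE,
    ),
]

def validate_custom_query(cypher: str) -> None:
    masked, _ = mask_protected(cypher)
    for pattern in _BLOCKED:
        if pattern.search(masked):
            raise ValidationError("Query contains a blocked operation.")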
+1 -64
@@ -2,7 +2,7 @@ import json
from unittest.mock import ANY, Mock, patch
import pytest
from conftest import TEST_PASSWORD, TODAY
from conftest import TODAY
from django.urls import reverse
from rest_framework import status
@@ -830,66 +830,3 @@ class TestUserRoleLinkPermissions:
)
assert response.status_code == status.HTTP_403_FORBIDDEN
@pytest.mark.django_db
class TestCrossTenantRoleLeak:
"""Regression tests for get_role() cross-tenant privilege leak.
get_role() must query admin_db (bypassing RLS) so that a user with a role
in tenant A cannot accidentally pass role checks when authenticated against
tenant B where they have no role.
"""
def test_user_with_role_in_tenant_a_denied_in_tenant_b(self, tenants_fixture):
"""User has admin role in tenant A, membership in tenant B but no role.
Hitting an RBAC-protected endpoint with a tenant-B token must return 403."""
from rest_framework.test import APIClient
tenant_a = tenants_fixture[0]
tenant_b = tenants_fixture[1]
user = User.objects.create_user(
name="cross_tenant_user",
email="cross_tenant@test.com",
password=TEST_PASSWORD,
)
Membership.objects.create(
user=user, tenant=tenant_a, role=Membership.RoleChoices.OWNER
)
Membership.objects.create(
user=user, tenant=tenant_b, role=Membership.RoleChoices.OWNER
)
# Role only in tenant A
role = Role.objects.create(
name="admin",
tenant_id=tenant_a.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.create(user=user, role=role, tenant_id=tenant_a.id)
# Mint token scoped to tenant B (where user has NO role)
serializer = TokenSerializer(
data={
"type": "tokens",
"email": "cross_tenant@test.com",
"password": TEST_PASSWORD,
"tenant_id": tenant_b.id,
}
)
serializer.is_valid(raise_exception=True)
access_token = serializer.validated_data["access"]
client = APIClient()
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
# user-list requires manage_users permission via HasPermissions
response = client.get(reverse("user-list"))
assert response.status_code == status.HTTP_403_FORBIDDEN
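# The fix this regression test guards can be sketched as follows (Role and
# MainRouter appear elsewhere in this diff; the reverse relation name is an
# assumption). The lookup must run on the RLS-bypassing admin alias AND be
# scoped by the token's tenant:
def get_role(user, tenant_id):
    return (
        Role.objects.using(MainRouter.admin_db)
        .filter(userrolerelationship__user=user, tenant_id=tenant_id)
        .first()
    )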
+12 -58
@@ -4,25 +4,14 @@ from unittest.mock import MagicMock
from config.settings.sentry import before_send
def _make_log_record(msg, level=logging.ERROR, name="test", args=None):
"""Build a real LogRecord so getMessage() works like in production."""
record = logging.LogRecord(
name=name,
level=level,
pathname="",
lineno=0,
msg=msg,
args=args,
exc_info=None,
)
return record
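# Why a real LogRecord matters: getMessage() applies %-formatting with
# record.args, while .msg is only the template. For example:
#   record = _make_log_record("[#%04X] error: %s", args=(0xE5CC, "defunct"))
#   record.msg           -> '[#%04X] error: %s'   ('defunct' not visible)
#   record.getMessage()  -> '[#E5CC] error: defunct'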
def test_before_send_ignores_log_with_ignored_exception():
"""Test that before_send ignores logs containing ignored exceptions."""
log_record = _make_log_record("Provider kubernetes is not connected")
log_record = MagicMock()
log_record.msg = "Provider kubernetes is not connected"
log_record.levelno = logging.ERROR # 40
hint = {"log_record": log_record}
event = MagicMock()
result = before_send(event, hint)
@@ -47,9 +36,12 @@ def test_before_send_ignores_exception_with_ignored_exception():
def test_before_send_passes_through_non_ignored_log():
"""Test that before_send passes through logs that don't contain ignored exceptions."""
log_record = _make_log_record("Some other error message")
log_record = MagicMock()
log_record.msg = "Some other error message"
log_record.levelno = logging.ERROR # 40
hint = {"log_record": log_record}
event = MagicMock()
result = before_send(event, hint)
@@ -74,53 +66,15 @@ def test_before_send_passes_through_non_ignored_exception():
def test_before_send_handles_warning_level():
"""Test that before_send handles warning level logs."""
log_record = _make_log_record(
"Provider kubernetes is not connected", level=logging.WARNING
)
log_record = MagicMock()
log_record.msg = "Provider kubernetes is not connected"
log_record.levelno = logging.WARNING # 30
hint = {"log_record": log_record}
event = MagicMock()
result = before_send(event, hint)
# Assert that the event was dropped (None returned)
assert result is None
def test_before_send_ignores_neo4j_defunct_connection():
"""Test that before_send drops neo4j.io defunct connection logs.
The Neo4j driver logs transient connection errors at ERROR level
before RetryableSession retries them. These are noise.
The driver uses %s formatting, so "defunct" is in the args, not
in the template. This test mirrors the real LogRecord structure.
"""
log_record = _make_log_record(
msg="[#%04X] _: <CONNECTION> error: %s: %r",
name="neo4j.io",
args=(
0xE5CC,
"Failed to read from defunct connection "
"IPv4Address(('cloud-neo4j.prowler.com', 7687))",
ConnectionResetError(104, "Connection reset by peer"),
),
)
hint = {"log_record": log_record}
event = MagicMock()
assert before_send(event, hint) is None
def test_before_send_passes_non_defunct_neo4j_log():
"""Test that before_send passes through neo4j.io logs that are not about defunct connections."""
log_record = _make_log_record(
msg="Some other neo4j transport error",
name="neo4j.io",
)
hint = {"log_record": log_record}
event = MagicMock()
assert before_send(event, hint) == event
+5 -50
@@ -33,7 +33,6 @@ from prowler.providers.m365.m365_provider import M365Provider
from prowler.providers.mongodbatlas.mongodbatlas_provider import MongodbatlasProvider
from prowler.providers.openstack.openstack_provider import OpenstackProvider
from prowler.providers.oraclecloud.oraclecloud_provider import OraclecloudProvider
from prowler.providers.vercel.vercel_provider import VercelProvider
class TestMergeDicts:
@@ -129,7 +128,6 @@ class TestReturnProwlerProvider:
(Provider.ProviderChoices.CLOUDFLARE.value, CloudflareProvider),
(Provider.ProviderChoices.OPENSTACK.value, OpenstackProvider),
(Provider.ProviderChoices.IMAGE.value, ImageProvider),
(Provider.ProviderChoices.VERCEL.value, VercelProvider),
],
)
def test_return_prowler_provider(self, provider_type, expected_provider):
@@ -220,24 +218,6 @@ class TestProwlerProviderConnectionTest:
registry_token="tok123",
)
@patch("api.utils.return_prowler_provider")
def test_prowler_provider_connection_test_vercel_provider(
self, mock_return_prowler_provider
):
"""Test connection test for Vercel provider passes team_id."""
provider = MagicMock()
provider.uid = "team_abcdef1234567890"
provider.provider = Provider.ProviderChoices.VERCEL.value
provider.secret.secret = {"api_token": "vercel_token_123"}
mock_return_prowler_provider.return_value = MagicMock()
prowler_provider_connection_test(provider)
mock_return_prowler_provider.return_value.test_connection.assert_called_once_with(
api_token="vercel_token_123",
team_id="team_abcdef1234567890",
raise_on_exception=False,
)
@patch("api.utils.return_prowler_provider")
def test_prowler_provider_connection_test_image_provider_no_creds(
self, mock_return_prowler_provider
@@ -304,10 +284,6 @@ class TestGetProwlerProviderKwargs:
Provider.ProviderChoices.OPENSTACK.value,
{},
),
(
Provider.ProviderChoices.VERCEL.value,
{"team_id": "provider_uid"},
),
],
)
def test_get_prowler_provider_kwargs(self, provider_type, expected_extra_kwargs):
@@ -806,15 +782,11 @@ class TestProwlerIntegrationConnectionTest:
}
integration.configuration = {}
# Mock successful JIRA connection with projects and issue types
# Mock successful JIRA connection with projects
mock_connection = MagicMock()
mock_connection.is_connected = True
mock_connection.error = None
mock_connection.projects = {"PROJ1": "Project 1", "PROJ2": "Project 2"}
mock_connection.issue_types = {
"PROJ1": ["Task", "Bug"],
"PROJ2": ["Task", "Story"],
}
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
@@ -843,12 +815,6 @@ class TestProwlerIntegrationConnectionTest:
"PROJ2": "Project 2",
}
# Verify issue types were saved to integration configuration
assert integration.configuration["issue_types"] == {
"PROJ1": ["Task", "Bug"],
"PROJ2": ["Task", "Story"],
}
# Verify integration.save() was called
integration.save.assert_called_once()
@@ -872,7 +838,6 @@ class TestProwlerIntegrationConnectionTest:
mock_connection.is_connected = False
mock_connection.error = Exception("Authentication failed: Invalid credentials")
mock_connection.projects = {} # Empty projects when connection fails
mock_connection.issue_types = {} # Empty issue types when connection fails
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
@@ -898,9 +863,6 @@ class TestProwlerIntegrationConnectionTest:
# Verify empty projects dict was saved to integration configuration
assert integration.configuration["projects"] == {}
# Verify empty issue types dict was saved to integration configuration
assert integration.configuration["issue_types"] == {}
# Verify integration.save() was called even on connection failure
integration.save.assert_called_once()
@@ -919,11 +881,11 @@ class TestProwlerIntegrationConnectionTest:
"domain": "example.atlassian.net",
}
integration.configuration = {
"issue_types": {"OLD_PROJ": ["Task"]}, # Existing configuration
"issue_types": ["Task"], # Existing configuration
"projects": {"OLD_PROJ": "Old Project"}, # Will be overwritten
}
# Mock successful JIRA connection with new projects and issue types
# Mock successful JIRA connection with new projects
mock_connection = MagicMock()
mock_connection.is_connected = True
mock_connection.error = None
@@ -931,10 +893,6 @@ class TestProwlerIntegrationConnectionTest:
"NEW_PROJ1": "New Project 1",
"NEW_PROJ2": "New Project 2",
}
mock_connection.issue_types = {
"NEW_PROJ1": ["Task", "Bug"],
"NEW_PROJ2": ["Story"],
}
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
@@ -952,11 +910,8 @@ class TestProwlerIntegrationConnectionTest:
"NEW_PROJ2": "New Project 2",
}
# Verify issue types were also updated
assert integration.configuration["issue_types"] == {
"NEW_PROJ1": ["Task", "Bug"],
"NEW_PROJ2": ["Story"],
}
# Verify other configuration fields were preserved
assert integration.configuration["issue_types"] == ["Task"]
# Verify integration.save() was called
integration.save.assert_called_once()
File diff suppressed because it is too large
-23
@@ -39,7 +39,6 @@ if TYPE_CHECKING:
)
from prowler.providers.openstack.openstack_provider import OpenstackProvider
from prowler.providers.oraclecloud.oraclecloud_provider import OraclecloudProvider
from prowler.providers.vercel.vercel_provider import VercelProvider
class CustomOAuth2Client(OAuth2Client):
@@ -95,7 +94,6 @@ def return_prowler_provider(
| MongodbatlasProvider
| OpenstackProvider
| OraclecloudProvider
| VercelProvider
):
"""Return the Prowler provider class based on the given provider type.
@@ -177,10 +175,6 @@ def return_prowler_provider(
from prowler.providers.image.image_provider import ImageProvider
prowler_provider = ImageProvider
case Provider.ProviderChoices.VERCEL.value:
from prowler.providers.vercel.vercel_provider import VercelProvider
prowler_provider = VercelProvider
case _:
raise ValueError(f"Provider type {provider.provider} not supported")
return prowler_provider
@@ -241,11 +235,6 @@ def get_prowler_provider_kwargs(
# clouds_yaml_content, clouds_yaml_cloud and provider_id are validated
# in the provider itself, so it's not needed here.
pass
elif provider.provider == Provider.ProviderChoices.VERCEL.value:
prowler_provider_kwargs = {
**prowler_provider_kwargs,
"team_id": provider.uid,
}
elif provider.provider == Provider.ProviderChoices.IMAGE.value:
# Detect whether uid is a registry URL (e.g. "docker.io/andoniaf") or
# a concrete image reference (e.g. "docker.io/andoniaf/myimage:latest").
@@ -292,7 +281,6 @@ def initialize_prowler_provider(
| MongodbatlasProvider
| OpenstackProvider
| OraclecloudProvider
| VercelProvider
):
"""Initialize a Prowler provider instance based on the given provider type.
@@ -344,13 +332,6 @@ def prowler_provider_connection_test(provider: Provider) -> Connection:
"raise_on_exception": False,
}
return prowler_provider.test_connection(**openstack_kwargs)
elif provider.provider == Provider.ProviderChoices.VERCEL.value:
vercel_kwargs = {
**prowler_provider_kwargs,
"team_id": provider.uid,
"raise_on_exception": False,
}
return prowler_provider.test_connection(**vercel_kwargs)
elif provider.provider == Provider.ProviderChoices.IMAGE.value:
image_kwargs = {
"image": provider.uid,
@@ -434,12 +415,8 @@ def prowler_integration_connection_test(integration: Integration) -> Connection:
raise_on_exception=False,
)
project_keys = jira_connection.projects if jira_connection.is_connected else {}
issue_types = (
jira_connection.issue_types if jira_connection.is_connected else {}
)
with rls_transaction(str(integration.tenant_id)):
integration.configuration["projects"] = project_keys
integration.configuration["issue_types"] = issue_types
integration.save()
return jira_connection
elif integration.integration_type == Integration.IntegrationChoices.SLACK:
@@ -69,10 +69,8 @@ class SecurityHubConfigSerializer(BaseValidateSerializer):
class JiraConfigSerializer(BaseValidateSerializer):
domain = serializers.CharField(read_only=True)
issue_types = serializers.DictField(
read_only=True,
child=serializers.ListField(child=serializers.CharField()),
default={},
issue_types = serializers.ListField(
read_only=True, child=serializers.CharField(), default=["Task"]
)
projects = serializers.DictField(read_only=True)
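# For reference, the two issue_types shapes this serializer change toggles
# between look like this (values illustrative):
#   per-project (DictField): {"PROJ1": ["Task", "Bug"], "PROJ2": ["Task", "Story"]}
#   flat default (ListField): ["Task"]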
@@ -404,17 +404,6 @@ from rest_framework_json_api import serializers
},
"required": ["clouds_yaml_content", "clouds_yaml_cloud"],
},
{
"type": "object",
"title": "Vercel API Token",
"properties": {
"api_token": {
"type": "string",
"description": "Vercel API token for authentication. Can be scoped to a specific team.",
},
},
"required": ["api_token"],
},
]
}
)
+5 -67
@@ -1573,8 +1573,6 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
serializer = OpenStackCloudsYamlProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.IMAGE.value:
serializer = ImageProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.VERCEL.value:
serializer = VercelProviderSecret(data=secret)
else:
raise serializers.ValidationError(
{"provider": f"Provider type not supported {provider_type}"}
@@ -1781,13 +1779,6 @@ class ImageProviderSecret(serializers.Serializer):
return attrs
class VercelProviderSecret(serializers.Serializer):
api_token = serializers.CharField()
class Meta:
resource_name = "provider-secrets"
class AlibabaCloudProviderSecret(serializers.Serializer):
access_key_id = serializers.CharField()
access_key_secret = serializers.CharField()
@@ -2722,11 +2713,11 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
)
config_serializer = JiraConfigSerializer
# Create non-editable configuration for JIRA integration
# issue_types will be populated per project when connection is tested
default_jira_issue_types = ["Task"]
configuration.update(
{
"projects": {},
"issue_types": {},
"issue_types": default_jira_issue_types,
"domain": credentials.get("domain"),
}
)
@@ -2941,25 +2932,13 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
return representation
class IntegrationJiraIssueTypesSerializer(BaseSerializerV1):
"""
Serializer for Jira issue types response.
"""
project_key = serializers.CharField(read_only=True)
issue_types = serializers.ListField(child=serializers.CharField(), read_only=True)
class JSONAPIMeta:
resource_name = "jira-issue-types"
class IntegrationJiraDispatchSerializer(BaseSerializerV1):
"""
Serializer for dispatching findings to JIRA integration.
"""
project_key = serializers.CharField(required=True)
issue_type = serializers.CharField(required=True)
issue_type = serializers.ChoiceField(required=True, choices=["Task"])
class JSONAPIMeta:
resource_name = "integrations-jira-dispatches"
@@ -2988,23 +2967,6 @@ class IntegrationJiraDispatchSerializer(BaseSerializerV1):
}
)
issue_type = attrs.get("issue_type")
available_issue_types = integration_instance.configuration.get(
"issue_types", {}
)
# Handle old format where issue_types was a flat list (e.g., ["Task"])
if not isinstance(available_issue_types, dict):
available_issue_types = {}
project_issue_types = available_issue_types.get(project_key, [])
if project_issue_types and issue_type not in project_issue_types:
raise ValidationError(
{
"issue_type": f"The issue type '{issue_type}' is not available for project '{project_key}'. "
f"Available types: {', '.join(project_issue_types)}. "
"Refresh the connection if this is an error."
}
)
return validated_attrs
@@ -4185,7 +4147,6 @@ class FindingGroupSerializer(BaseSerializerV1):
check_description = serializers.CharField(required=False, allow_null=True)
severity = serializers.CharField()
status = serializers.CharField()
muted = serializers.BooleanField()
impacted_providers = serializers.ListField(
child=serializers.CharField(), required=False
)
@@ -4193,25 +4154,9 @@ class FindingGroupSerializer(BaseSerializerV1):
resources_total = serializers.IntegerField()
pass_count = serializers.IntegerField()
fail_count = serializers.IntegerField()
manual_count = serializers.IntegerField()
pass_muted_count = serializers.IntegerField()
fail_muted_count = serializers.IntegerField()
manual_muted_count = serializers.IntegerField()
muted_count = serializers.IntegerField()
new_count = serializers.IntegerField()
changed_count = serializers.IntegerField()
new_fail_count = serializers.IntegerField()
new_fail_muted_count = serializers.IntegerField()
new_pass_count = serializers.IntegerField()
new_pass_muted_count = serializers.IntegerField()
new_manual_count = serializers.IntegerField()
new_manual_muted_count = serializers.IntegerField()
changed_fail_count = serializers.IntegerField()
changed_fail_muted_count = serializers.IntegerField()
changed_pass_count = serializers.IntegerField()
changed_pass_muted_count = serializers.IntegerField()
changed_manual_count = serializers.IntegerField()
changed_manual_muted_count = serializers.IntegerField()
first_seen_at = serializers.DateTimeField(required=False, allow_null=True)
last_seen_at = serializers.DateTimeField(required=False, allow_null=True)
failing_since = serializers.DateTimeField(required=False, allow_null=True)
@@ -4225,21 +4170,16 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
Serializer for Finding Group Resources - resources within a finding group.
Returns individual resources with their current status, severity,
and timing information. Orphan findings (without any resource) expose the
finding id as `id` so the row stays identifiable in the UI.
and timing information.
"""
id = serializers.UUIDField(source="row_id")
id = serializers.UUIDField(source="resource_id")
resource = serializers.SerializerMethodField()
provider = serializers.SerializerMethodField()
finding_id = serializers.UUIDField()
status = serializers.CharField()
severity = serializers.CharField()
muted = serializers.BooleanField()
delta = serializers.CharField(required=False, allow_null=True)
first_seen_at = serializers.DateTimeField(required=False, allow_null=True)
last_seen_at = serializers.DateTimeField(required=False, allow_null=True)
muted_reason = serializers.CharField(required=False, allow_null=True)
class JSONAPIMeta:
resource_name = "finding-group-resources"
@@ -4253,7 +4193,6 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
"service": {"type": "string"},
"region": {"type": "string"},
"type": {"type": "string"},
"resource_group": {"type": "string"},
},
}
)
@@ -4265,7 +4204,6 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
"service": obj.get("resource_service", ""),
"region": obj.get("resource_region", ""),
"type": obj.get("resource_type", ""),
"resource_group": obj.get("resource_group", ""),
}
@extend_schema_field(
File diff suppressed because it is too large
+1 -3
@@ -17,10 +17,8 @@ celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.conf.update(result_extended=True, result_expires=None)
celery_app.conf.broker_transport_options = {
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT,
"queue_order_strategy": "priority",
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
celery_app.conf.task_default_priority = 6
celery_app.conf.result_backend_transport_options = {
"visibility_timeout": BROKER_VISIBILITY_TIMEOUT
}
-5
@@ -299,8 +299,3 @@ DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
# SAML requirement
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
# Attack Paths
ATTACK_PATHS_SCAN_STALE_THRESHOLD_MINUTES = env.int(
"ATTACK_PATHS_SCAN_STALE_THRESHOLD_MINUTES", 2880
) # 48h
+1 -1
@@ -15,7 +15,7 @@ from config.django.production import LOGGING as DJANGO_LOGGERS, DEBUG # noqa: E
from config.custom_logging import BackendLogger # noqa: E402
BIND_ADDRESS = env("DJANGO_BIND_ADDRESS", default="127.0.0.1")
PORT = env("DJANGO_PORT", default=8080)
PORT = env("DJANGO_PORT", default=8000)
# Server settings
bind = f"{BIND_ADDRESS}:{PORT}"
+1 -43
@@ -1,52 +1,10 @@
from urllib.parse import quote
from config.env import env
_VALID_SCHEMES = {"redis", "rediss"}
def _build_celery_broker_url(
scheme: str,
username: str,
password: str,
host: str,
port: str,
db: str,
) -> str:
if scheme not in _VALID_SCHEMES:
raise ValueError(
f"Invalid VALKEY_SCHEME '{scheme}'. Must be one of: {', '.join(sorted(_VALID_SCHEMES))}"
)
encoded_username = quote(username, safe="") if username else ""
encoded_password = quote(password, safe="") if password else ""
auth = ""
if encoded_username and encoded_password:
auth = f"{encoded_username}:{encoded_password}@"
elif encoded_password:
auth = f":{encoded_password}@"
elif encoded_username:
auth = f"{encoded_username}@"
return f"{scheme}://{auth}{host}:{port}/{db}"
VALKEY_SCHEME = env("VALKEY_SCHEME", default="redis")
VALKEY_USERNAME = env("VALKEY_USERNAME", default="")
VALKEY_PASSWORD = env("VALKEY_PASSWORD", default="")
VALKEY_HOST = env("VALKEY_HOST", default="valkey")
VALKEY_PORT = env("VALKEY_PORT", default="6379")
VALKEY_DB = env("VALKEY_DB", default="0")
CELERY_BROKER_URL = _build_celery_broker_url(
VALKEY_SCHEME,
VALKEY_USERNAME,
VALKEY_PASSWORD,
VALKEY_HOST,
VALKEY_PORT,
VALKEY_DB,
)
CELERY_BROKER_URL = f"redis://{VALKEY_HOST}:{VALKEY_PORT}/{VALKEY_DB}"
CELERY_RESULT_BACKEND = "django-db"
CELERY_TASK_TRACK_STARTED = True
+2 -16
@@ -1,5 +1,4 @@
import sentry_sdk
from config.env import env
IGNORED_EXCEPTIONS = [
@@ -86,20 +85,8 @@ def before_send(event, hint):
# Ignore logs with the ignored_exceptions
# https://docs.python.org/3/library/logging.html#logrecord-objects
if "log_record" in hint:
log_record = hint["log_record"]
log_msg = log_record.getMessage()
log_lvl = log_record.levelno
# The Neo4j driver logs transient connection errors (defunct
# connections, resets) at ERROR level via the `neo4j.io` logger.
# `RetryableSession` handles these with retries. If all retries
# are exhausted, the exception propagates and Sentry captures
# it as a normal exception event.
if (
getattr(log_record, "name", "").startswith("neo4j.io")
and "defunct" in log_msg
):
return None
log_msg = hint["log_record"].msg
log_lvl = hint["log_record"].levelno
# Handle Error and Critical events and discard the rest
if log_lvl <= 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
@@ -120,7 +107,6 @@ sentry_sdk.init(
# see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
before_send=before_send,
send_default_pii=True,
traces_sample_rate=env.float("DJANGO_SENTRY_TRACES_SAMPLE_RATE", default=0.02),
_experiments={
# Set continuous_profiling_auto_start to True
# to automatically start the profiler on when
+2 -110
@@ -111,9 +111,8 @@ def disable_logging():
logging.disable(logging.CRITICAL)
@pytest.fixture(scope="session")
def _session_test_user(django_db_setup, django_db_blocker):
"""Create the test user once per session. Internal; use create_test_user instead."""
@pytest.fixture(scope="session", autouse=True)
def create_test_user(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
user = User.objects.create_user(
name="testing",
@@ -123,21 +122,6 @@ def _session_test_user(django_db_setup, django_db_blocker):
return user
@pytest.fixture(autouse=True)
def create_test_user(_session_test_user, django_db_blocker):
"""Re-create the session-scoped test user when a TransactionTestCase
has truncated the users table."""
with django_db_blocker.unblock():
if not User.objects.filter(pk=_session_test_user.pk).exists():
User.objects.create_user(
id=_session_test_user.pk,
name="testing",
email=TEST_USER,
password=TEST_PASSWORD,
)
return _session_test_user
@pytest.fixture(scope="function")
def create_test_user_rbac(django_db_setup, django_db_blocker, tenants_fixture):
with django_db_blocker.unblock():
@@ -565,12 +549,6 @@ def providers_fixture(tenants_fixture):
alias="googleworkspace_testing",
tenant_id=tenant.id,
)
provider13 = Provider.objects.create(
provider="vercel",
uid="team_abcdef1234567890ab",
alias="vercel_testing",
tenant_id=tenant.id,
)
return (
provider1,
@@ -585,7 +563,6 @@ def providers_fixture(tenants_fixture):
provider10,
provider11,
provider12,
provider13,
)
@@ -2035,7 +2012,6 @@ def finding_groups_fixture(
"CheckId": "s3_bucket_public_access",
"checktitle": "Ensure S3 buckets do not allow public access",
"Description": "S3 buckets should be configured to restrict public access.",
"resourcegroup": "storage",
},
first_seen_at="2024-01-02T00:00:00Z",
muted=False,
@@ -2060,7 +2036,6 @@ def finding_groups_fixture(
"CheckId": "s3_bucket_public_access",
"checktitle": "Ensure S3 buckets do not allow public access",
"Description": "S3 buckets should be configured to restrict public access.",
"resourcegroup": "storage",
},
first_seen_at="2024-01-03T00:00:00Z",
muted=False,
@@ -2259,89 +2234,6 @@ def finding_groups_fixture(
return findings
@pytest.fixture
def finding_groups_title_variants_fixture(
tenants_fixture, providers_fixture, scans_fixture, resources_fixture
):
"""
Two providers report the same check_id with different checktitle values.
Simulates a Prowler version upgrade where the check title changed but the
check_id stayed the same. Used to verify that check_title__icontains
resolves to check_id first, so results include all providers regardless
of which title variant matches the search term.
"""
tenant = tenants_fixture[0]
provider1, provider2, *_ = providers_fixture
scan1, scan2, *_ = scans_fixture
resource1, resource2, *_ = resources_fixture
findings = []
# Provider 1 — OLD title variant
finding_old = Finding.objects.create(
tenant_id=tenant.id,
uid="fg_title_variant_old",
scan=scan1,
delta="new",
status=Status.FAIL,
status_extended="Secret scanning not enabled",
impact=Severity.high,
impact_extended="High risk",
severity=Severity.high,
raw_result={"status": Status.FAIL, "severity": Severity.high},
tags={},
check_id="github_secret_scanning_enabled",
check_metadata={
"CheckId": "github_secret_scanning_enabled",
"checktitle": "Ensure repository has secret scanning enabled",
"Description": "Checks if secret scanning is enabled.",
},
first_seen_at="2024-01-01T00:00:00Z",
muted=False,
)
finding_old.add_resources([resource1])
findings.append(finding_old)
# Provider 2 — NEW title variant (same check_id, different checktitle)
finding_new = Finding.objects.create(
tenant_id=tenant.id,
uid="fg_title_variant_new",
scan=scan2,
delta="new",
status=Status.FAIL,
status_extended="Secret scanning not enabled on repo",
impact=Severity.high,
impact_extended="High risk",
severity=Severity.high,
raw_result={"status": Status.FAIL, "severity": Severity.high},
tags={},
check_id="github_secret_scanning_enabled",
check_metadata={
"CheckId": "github_secret_scanning_enabled",
"checktitle": "Check if secret scanning is enabled in GitHub",
"Description": "Checks if secret scanning is enabled.",
},
first_seen_at="2024-01-02T00:00:00Z",
muted=False,
)
finding_new.add_resources([resource2])
findings.append(finding_new)
from tasks.jobs.scan import aggregate_finding_group_summaries
aggregate_finding_group_summaries(
tenant_id=str(tenant.id),
scan_id=str(scan1.id),
)
aggregate_finding_group_summaries(
tenant_id=str(tenant.id),
scan_id=str(scan2.id),
)
return findings
def pytest_collection_modifyitems(items):
"""Ensure test_rbac.py is executed first."""
items.sort(key=lambda item: 0 if "test_rbac.py" in item.nodeid else 1)
+10 -59
@@ -1,8 +1,6 @@
# Portions of this file are based on code from the Cartography project
# (https://github.com/cartography-cncf/cartography), which is licensed under the Apache 2.0 License.
import time
from typing import Any
import aioboto3
@@ -35,7 +33,7 @@ def start_aws_ingestion(
For the scan progress updates:
- The caller of this function (`tasks.jobs.attack_paths.scan.run`) has set it to 2.
- When control returns to the caller, it will be set to 93.
- When control returns to the caller, it will be set to 95.
"""
# Initialize variables common to all jobs
@@ -91,50 +89,34 @@ def start_aws_ingestion(
logger.info(
f"Syncing function permission_relationships for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["permission_relationships"](**sync_args)
logger.info(
f"Synced function permission_relationships for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 88)
if "resourcegroupstaggingapi" in requested_syncs:
logger.info(
f"Syncing function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.RESOURCE_FUNCTIONS["resourcegroupstaggingapi"](**sync_args)
logger.info(
f"Synced function resourcegroupstaggingapi for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 89)
logger.info(
f"Syncing ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_ec2_iaminstanceprofile.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced ec2_iaminstanceprofile scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 90)
logger.info(
f"Syncing lambda_ecr analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_analysis_job(
"aws_lambda_ecr.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lambda_ecr analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
if all(
s in requested_syncs
@@ -143,34 +125,25 @@ def start_aws_ingestion(
logger.info(
f"Syncing lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_container_exposure.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_container_exposure scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
if all(s in requested_syncs for s in ["ec2:network_acls", "ec2:load_balancer_v2"]):
logger.info(
f"Syncing lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid}"
)
t0 = time.perf_counter()
cartography_aws.run_scoped_analysis_job(
"aws_lb_nacl_direct.json",
neo4j_session,
common_job_parameters,
)
logger.info(
f"Synced lb_nacl_direct scoped analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 91)
logger.info(f"Syncing metadata for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws.merge_module_sync_metadata(
neo4j_session,
group_type="AWSAccount",
@@ -179,23 +152,24 @@ def start_aws_ingestion(
update_tag=cartography_config.update_tag,
stat_handler=cartography_aws.stat_handler,
)
logger.info(
f"Synced metadata for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 92)
# Removing the added extra field
del common_job_parameters["AWS_ID"]
logger.info(f"Syncing cleanup_job for AWS account {prowler_api_provider.uid}")
cartography_aws.run_cleanup_job(
"aws_post_ingestion_principals_cleanup.json",
neo4j_session,
common_job_parameters,
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)
logger.info(f"Syncing analysis for AWS account {prowler_api_provider.uid}")
t0 = time.perf_counter()
cartography_aws._perform_aws_analysis(
requested_syncs, neo4j_session, common_job_parameters
)
logger.info(
f"Synced analysis for AWS account {prowler_api_provider.uid} in {time.perf_counter() - t0:.3f}s"
)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93)
db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94)
return failed_syncs
@@ -260,8 +234,6 @@ def sync_aws_account(
)
try:
func_t0 = time.perf_counter()
# `ecr:image_layers` uses `aioboto3_session` instead of `boto3_session`
if func_name == "ecr:image_layers":
cartography_aws.RESOURCE_FUNCTIONS[func_name](
@@ -285,15 +257,7 @@ def sync_aws_account(
else:
cartography_aws.RESOURCE_FUNCTIONS[func_name](**sync_args)
logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s"
)
except Exception as e:
logger.info(
f"Synced function {func_name} for AWS account {prowler_api_provider.uid} in {time.perf_counter() - func_t0:.3f}s (FAILED)"
)
exception_message = utils.stringify_exception(
e, f"Exception for AWS sync function: {func_name}"
)
@@ -313,16 +277,3 @@ def sync_aws_account(
)
return failed_syncs
def extract_short_uid(uid: str) -> str:
"""Return the short identifier from an AWS ARN or resource ID.
Supported inputs end in one of:
- `<type>/<id>` (e.g. `instance/i-xxx`)
- `<type>:<id>` (e.g. `function:name`)
- `<id>` (e.g. `bucket-name` or `i-xxx`)
If `uid` is already a short resource ID, it is returned unchanged.
"""
return uid.rsplit("/", 1)[-1].rsplit(":", 1)[-1]
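# Illustrative inputs for the three supported shapes (ARNs invented):
#   extract_short_uid("arn:aws:ec2:us-east-1:123456789012:instance/i-0abc")  -> 'i-0abc'
#   extract_short_uid("arn:aws:lambda:us-east-1:123456789012:function:fn")   -> 'fn'
#   extract_short_uid("my-bucket")                                           -> 'my-bucket'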
@@ -1,152 +0,0 @@
from datetime import datetime, timedelta, timezone
from celery import current_app, states
from celery.utils.log import get_task_logger
from config.django.base import ATTACK_PATHS_SCAN_STALE_THRESHOLD_MINUTES
from tasks.jobs.attack_paths.db_utils import (
_mark_scan_finished,
recover_graph_data_ready,
)
from api.attack_paths import database as graph_database
from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.models import AttackPathsScan, StateChoices
logger = get_task_logger(__name__)
def cleanup_stale_attack_paths_scans() -> dict:
"""
Find `EXECUTING` `AttackPathsScan` scans whose workers are dead or that have
exceeded the stale threshold, and mark them as `FAILED`.
Two-pass detection:
1. If `TaskResult.worker` exists, ping the worker.
- Dead worker: cleanup immediately (any age).
- Alive + past threshold: revoke the task, then cleanup.
- Alive + within threshold: skip.
2. If no worker field: fall back to time-based heuristic only.
"""
threshold = timedelta(minutes=ATTACK_PATHS_SCAN_STALE_THRESHOLD_MINUTES)
now = datetime.now(tz=timezone.utc)
cutoff = now - threshold
executing_scans = (
AttackPathsScan.all_objects.using(MainRouter.admin_db)
.filter(state=StateChoices.EXECUTING)
.select_related("task__task_runner_task")
)
# Cache worker liveness so each worker is pinged at most once
executing_scans = list(executing_scans)
workers = {
tr.worker
for scan in executing_scans
if (tr := getattr(scan.task, "task_runner_task", None) if scan.task else None)
and tr.worker
}
worker_alive = {w: _is_worker_alive(w) for w in workers}
cleaned_up = []
for scan in executing_scans:
task_result = (
getattr(scan.task, "task_runner_task", None) if scan.task else None
)
worker = task_result.worker if task_result else None
if worker:
alive = worker_alive.get(worker, True)
if alive:
if scan.started_at and scan.started_at >= cutoff:
continue
# Alive but stale — revoke before cleanup
_revoke_task(task_result)
reason = (
"Scan exceeded stale threshold — " "cleaned up by periodic task"
)
else:
reason = "Worker dead — cleaned up by periodic task"
else:
# No worker recorded — time-based heuristic only
if scan.started_at and scan.started_at >= cutoff:
continue
reason = (
"No worker recorded, scan exceeded stale threshold — "
"cleaned up by periodic task"
)
if _cleanup_scan(scan, task_result, reason):
cleaned_up.append(str(scan.id))
logger.info(
f"Stale `AttackPathsScan` cleanup: {len(cleaned_up)} scan(s) cleaned up"
)
return {"cleaned_up_count": len(cleaned_up), "scan_ids": cleaned_up}
def _is_worker_alive(worker: str) -> bool:
"""Ping a specific Celery worker. Returns `True` if it responds or on error."""
try:
response = current_app.control.inspect(destination=[worker], timeout=1.0).ping()
return response is not None and worker in response
except Exception:
logger.exception(f"Failed to ping worker {worker}, treating as alive")
return True
def _revoke_task(task_result) -> None:
"""Send `SIGTERM` to a hung Celery task. Non-fatal on failure."""
try:
current_app.control.revoke(
task_result.task_id, terminate=True, signal="SIGTERM"
)
logger.info(f"Revoked task {task_result.task_id}")
except Exception:
logger.exception(f"Failed to revoke task {task_result.task_id}")
def _cleanup_scan(scan, task_result, reason: str) -> bool:
"""
Clean up a single stale `AttackPathsScan`:
drop temp DB, mark `FAILED`, update `TaskResult`, recover `graph_data_ready`.
Returns `True` if the scan was actually cleaned up, `False` if skipped.
"""
scan_id_str = str(scan.id)
# 1. Drop temp Neo4j database
tmp_db_name = graph_database.get_database_name(scan.id, temporary=True)
try:
graph_database.drop_database(tmp_db_name)
except Exception:
logger.exception(f"Failed to drop temp database {tmp_db_name}")
# 2. Lock row, verify still EXECUTING, mark FAILED — all atomic
with rls_transaction(str(scan.tenant_id)):
try:
fresh_scan = AttackPathsScan.objects.select_for_update().get(id=scan.id)
except AttackPathsScan.DoesNotExist:
logger.warning(f"Scan {scan_id_str} no longer exists, skipping")
return False
if fresh_scan.state != StateChoices.EXECUTING:
logger.info(f"Scan {scan_id_str} is now {fresh_scan.state}, skipping")
return False
_mark_scan_finished(fresh_scan, StateChoices.FAILED, {"global_error": reason})
# 3. Mark `TaskResult` as `FAILURE` (not RLS-protected, outside lock)
if task_result:
task_result.status = states.FAILURE
task_result.date_done = datetime.now(tz=timezone.utc)
task_result.save(update_fields=["status", "date_done"])
# 4. Recover graph_data_ready if provider data still exists
recover_graph_data_ready(fresh_scan)
logger.info(f"Cleaned up stale scan {scan_id_str}: {reason}")
return True
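# How this cleanup was scheduled is not shown in the diff; a hypothetical
# beat-schedule registration (task name and cadence are assumptions):
from celery import shared_task
from celery.schedules import crontab

@shared_task(name="tasks.cleanup-stale-attack-paths-scans")
def cleanup_stale_attack_paths_scans_task():
    return cleanup_stale_attack_paths_scans()

CELERY_BEAT_SCHEDULE = {
    "cleanup-stale-attack-paths-scans": {
        "task": "tasks.cleanup-stale-attack-paths-scans",
        "schedule": crontab(minute="*/30"),
    },
}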

Some files were not shown because too many files have changed in this diff.