Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-02-09 15:10:36 +00:00).

Compare commits: 138 commits between `feat/PROWL…` (branch name truncated in the capture) and `PROWLER-82`.

The compare view listed Author, SHA1, and Date columns, but only the short SHAs were captured:

```
15e3c6c158 7647d08357 ebd5283975 ecc8eaf366 619d1ffc62 9e20cb2e5a
cb76e77851 a24f818547 e07687ce67 d016039b18 ac013ec6fc 00c57cea8d
1b2c08649e 47317680e5 c7558a9f78 86ecec542b cb82a42035 2c69eb58c9
fcd9e2d40f f0c69874e0 21444f7880 3e3f56629f 38f6ca9514 7f71b93eec
12752a5839 eb76e2b986 42c56fa33a 5dcdeed782 c35eaa8aa9 bca7c3a479
e03fb88ca2 cecf288d4f 8c4d251c51 98d4e08cbb 3c004582d7 726aeec64b
3d1a0b1270 b014fdbde3 d693a34747 a6860ffa7d d06af16a5c 0250bc3b0e
ee1e6c35f2 2e552c65a5 910514e964 695a3466cd 7590ed7913 7fb82b0650
fa21a300fb d51fa60e58 9d69d3a25f 74da022e48 da2b6d028b 55d8a5d664
ecc1cf8b04 394a62fab1 0ef68f55a1 94b14d1592 36ac1bc47e 41de65ceaa
12f95e3a19 f5cada05c3 bfd8abdd89 99d2736116 b11e074f41 a4e084afc9
2bba3efbb1 a2af18885d edcac4e4bc 71ed16ee29 f2b2f0af95 90ace9265b
773e4b23b5 9a22b1238a 80095603fd f234c16015 b3b1ee3252 3ba5d43c64
3720f1d235 21868d7741 ba4f93ec36 5b00846afe a5a5c35f90 5d894dcf94
5c0f9b19b0 a7d8c8f679 979ae1150c 6d182f3efd 8f937e4530 9dd149d20a
8e96f8361a 0822692903 7c4b814b43 c0b63e8564 e3ae9b37d0 95087dcba7
346c17b57d c8af89aa23 1d386e7f27 8e2d2f00e6 22d1daf3c4 32f39e2366
c7050b1979 c612637a86 f6373387fd b390d6925b 8d76e923cf 25b732655c
f8fe1a3655 05a07567b5 9d658ef531 557b5aa480 c1140dfcc0 49e02bfbd1
f086106d53 55e55bc7a3 f9efe08984 5ff390e6fb 72d2ff40f2 c667ff91be
1e31fe7441 e4d1d647c5 4d078aece5 c2c73db4e7 6e736fcdac 5bb5fbe468
c0da0f909f aa2b86f96d c005959835 7ea76a71f7 e89e50a3de 943c5bf2ea
bac6aa85c0 22540fc0ae 3a335583df 6668931db7 6202b45a97 2636351f5d
```

`.github/test-impact.yml` (vendored, 17 lines changed; hunks without visible +/- pairs in the capture are shown as context only):

```diff
@@ -14,7 +14,7 @@ ignored:
-  - "*.md"
+  - "**/*.md"
   - mkdocs.yml

   # Config files that don't affect runtime
   - .gitignore
   - .gitattributes
@@ -23,7 +23,7 @@ ignored:
   - .backportrc.json
   - CODEOWNERS
   - LICENSE

   # IDE/Editor configs
   - .vscode/**
   - .idea/**
@@ -31,10 +31,13 @@ ignored:
   # Examples and contrib (not production code)
   - examples/**
   - contrib/**

   # Skills (AI agent configs, not runtime)
   - skills/**

+  # E2E setup helpers (not runnable tests)
+  - ui/tests/setups/**
+
   # Permissions docs
   - permissions/**

@@ -47,18 +50,18 @@ critical:
   - prowler/config/**
   - prowler/exceptions/**
   - prowler/providers/common/**

   # API Core
   - api/src/backend/api/models.py
   - api/src/backend/config/**
   - api/src/backend/conftest.py

   # UI Core
   - ui/lib/**
   - ui/types/**
   - ui/config/**
   - ui/middleware.ts

   # CI/CD changes
   - .github/workflows/**
   - .github/test-impact.yml
```

`.github/workflows/ui-e2e-tests-v2.yml` (vendored, 15 lines changed; in two hunks the old and new lines render identically in the capture, suggesting a whitespace-only change):

```diff
@@ -25,7 +25,7 @@ jobs:
   e2e-tests:
     needs: impact-analysis
     if: |
-      github.repository == 'prowler-cloud/prowler' &&
+      github.repository == 'prowler-cloud/prowler' &&
      (needs.impact-analysis.outputs.has-ui-e2e == 'true' || needs.impact-analysis.outputs.run-all == 'true')
     runs-on: ubuntu-latest
     env:
@@ -200,7 +200,14 @@ jobs:
           # e.g., "ui/tests/providers/**" -> "tests/providers"
           TEST_PATHS="${{ env.E2E_TEST_PATHS }}"
           # Remove ui/ prefix and convert ** to empty (playwright handles recursion)
-          TEST_PATHS=$(echo "$TEST_PATHS" | sed 's|ui/||g' | sed 's|\*\*||g' | tr ' ' '\n' | sort -u | tr '\n' ' ')
+          TEST_PATHS=$(echo "$TEST_PATHS" | sed 's|ui/||g' | sed 's|\*\*||g' | tr ' ' '\n' | sort -u)
+          # Drop auth setup helpers (not runnable test suites)
+          TEST_PATHS=$(echo "$TEST_PATHS" | grep -v '^tests/setups/')
+          if [[ -z "$TEST_PATHS" ]]; then
+            echo "No runnable E2E test paths after filtering setups"
+            exit 0
+          fi
+          TEST_PATHS=$(echo "$TEST_PATHS" | tr '\n' ' ')
           echo "Resolved test paths: $TEST_PATHS"
           pnpm exec playwright test $TEST_PATHS
         fi
@@ -222,8 +229,8 @@ jobs:
   skip-e2e:
     needs: impact-analysis
     if: |
-      github.repository == 'prowler-cloud/prowler' &&
-      needs.impact-analysis.outputs.has-ui-e2e != 'true' &&
+      github.repository == 'prowler-cloud/prowler' &&
+      needs.impact-analysis.outputs.has-ui-e2e != 'true' &&
      needs.impact-analysis.outputs.run-all != 'true'
     runs-on: ubuntu-latest
     steps:
```

`.github/workflows/ui-e2e-tests.yml` (vendored, deleted, 172 lines). The removed workflow, reconstructed with standard YAML indentation:

```yaml
name: UI - E2E Tests

on:
  pull_request:
    branches:
      - master
      - "v5.*"
    paths:
      - '.github/workflows/ui-e2e-tests.yml'
      - 'ui/**'

jobs:
  e2e-tests:
    if: github.repository == 'prowler-cloud/prowler'
    runs-on: ubuntu-latest
    env:
      AUTH_SECRET: 'fallback-ci-secret-for-testing'
      AUTH_TRUST_HOST: true
      NEXTAUTH_URL: 'http://localhost:3000'
      NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
      E2E_ADMIN_USER: ${{ secrets.E2E_ADMIN_USER }}
      E2E_ADMIN_PASSWORD: ${{ secrets.E2E_ADMIN_PASSWORD }}
      E2E_AWS_PROVIDER_ACCOUNT_ID: ${{ secrets.E2E_AWS_PROVIDER_ACCOUNT_ID }}
      E2E_AWS_PROVIDER_ACCESS_KEY: ${{ secrets.E2E_AWS_PROVIDER_ACCESS_KEY }}
      E2E_AWS_PROVIDER_SECRET_KEY: ${{ secrets.E2E_AWS_PROVIDER_SECRET_KEY }}
      E2E_AWS_PROVIDER_ROLE_ARN: ${{ secrets.E2E_AWS_PROVIDER_ROLE_ARN }}
      E2E_AZURE_SUBSCRIPTION_ID: ${{ secrets.E2E_AZURE_SUBSCRIPTION_ID }}
      E2E_AZURE_CLIENT_ID: ${{ secrets.E2E_AZURE_CLIENT_ID }}
      E2E_AZURE_SECRET_ID: ${{ secrets.E2E_AZURE_SECRET_ID }}
      E2E_AZURE_TENANT_ID: ${{ secrets.E2E_AZURE_TENANT_ID }}
      E2E_M365_DOMAIN_ID: ${{ secrets.E2E_M365_DOMAIN_ID }}
      E2E_M365_CLIENT_ID: ${{ secrets.E2E_M365_CLIENT_ID }}
      E2E_M365_SECRET_ID: ${{ secrets.E2E_M365_SECRET_ID }}
      E2E_M365_TENANT_ID: ${{ secrets.E2E_M365_TENANT_ID }}
      E2E_M365_CERTIFICATE_CONTENT: ${{ secrets.E2E_M365_CERTIFICATE_CONTENT }}
      E2E_KUBERNETES_CONTEXT: 'kind-kind'
      E2E_KUBERNETES_KUBECONFIG_PATH: /home/runner/.kube/config
      E2E_GCP_BASE64_SERVICE_ACCOUNT_KEY: ${{ secrets.E2E_GCP_BASE64_SERVICE_ACCOUNT_KEY }}
      E2E_GCP_PROJECT_ID: ${{ secrets.E2E_GCP_PROJECT_ID }}
      E2E_GITHUB_APP_ID: ${{ secrets.E2E_GITHUB_APP_ID }}
      E2E_GITHUB_BASE64_APP_PRIVATE_KEY: ${{ secrets.E2E_GITHUB_BASE64_APP_PRIVATE_KEY }}
      E2E_GITHUB_USERNAME: ${{ secrets.E2E_GITHUB_USERNAME }}
      E2E_GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.E2E_GITHUB_PERSONAL_ACCESS_TOKEN }}
      E2E_GITHUB_ORGANIZATION: ${{ secrets.E2E_GITHUB_ORGANIZATION }}
      E2E_GITHUB_ORGANIZATION_ACCESS_TOKEN: ${{ secrets.E2E_GITHUB_ORGANIZATION_ACCESS_TOKEN }}
      E2E_ORGANIZATION_ID: ${{ secrets.E2E_ORGANIZATION_ID }}
      E2E_OCI_TENANCY_ID: ${{ secrets.E2E_OCI_TENANCY_ID }}
      E2E_OCI_USER_ID: ${{ secrets.E2E_OCI_USER_ID }}
      E2E_OCI_FINGERPRINT: ${{ secrets.E2E_OCI_FINGERPRINT }}
      E2E_OCI_KEY_CONTENT: ${{ secrets.E2E_OCI_KEY_CONTENT }}
      E2E_OCI_REGION: ${{ secrets.E2E_OCI_REGION }}
      E2E_NEW_USER_PASSWORD: ${{ secrets.E2E_NEW_USER_PASSWORD }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
      - name: Create k8s Kind Cluster
        uses: helm/kind-action@v1
        with:
          cluster_name: kind
      - name: Modify kubeconfig
        run: |
          # Point the kubeconfig at the kind cluster server (https://kind-control-plane:6443)
          # so it is reachable from the worker service in docker-compose.yml
          kubectl config set-cluster kind-kind --server=https://kind-control-plane:6443
          kubectl config view
      - name: Add network kind to docker compose
        run: |
          # Add the kind network to docker compose to interconnect with the kind cluster
          yq -i '.networks.kind.external = true' docker-compose.yml
          # Attach the worker service to both the kind and default networks
          yq -i '.services.worker.networks = ["kind","default"]' docker-compose.yml
      - name: Fix API data directory permissions
        run: docker run --rm -v $(pwd)/_data/api:/data alpine chown -R 1000:1000 /data
      - name: Add AWS credentials for testing AWS SDK Default Adding Provider
        run: |
          echo "Adding AWS credentials for testing AWS SDK Default Adding Provider..."
          echo "AWS_ACCESS_KEY_ID=${{ secrets.E2E_AWS_PROVIDER_ACCESS_KEY }}" >> .env
          echo "AWS_SECRET_ACCESS_KEY=${{ secrets.E2E_AWS_PROVIDER_SECRET_KEY }}" >> .env
      - name: Start API services
        run: |
          # Override the docker-compose image tag to use latest instead of stable.
          # This overrides any PROWLER_API_VERSION set in the .env file.
          export PROWLER_API_VERSION=latest
          echo "Using PROWLER_API_VERSION=${PROWLER_API_VERSION}"
          docker compose up -d api worker worker-beat
      - name: Wait for API to be ready
        run: |
          echo "Waiting for prowler-api..."
          timeout=150 # 2.5 minutes max (checks every 5 seconds)
          elapsed=0
          while [ $elapsed -lt $timeout ]; do
            if curl -s ${NEXT_PUBLIC_API_BASE_URL}/docs >/dev/null 2>&1; then
              echo "Prowler API is ready!"
              exit 0
            fi
            echo "Waiting for prowler-api... (${elapsed}s elapsed)"
            sleep 5
            elapsed=$((elapsed + 5))
          done
          echo "Timeout waiting for prowler-api to start"
          exit 1
      - name: Load database fixtures for E2E tests
        run: |
          docker compose exec -T api sh -c '
            echo "Loading all fixtures from api/fixtures/dev/..."
            for fixture in api/fixtures/dev/*.json; do
              if [ -f "$fixture" ]; then
                echo "Loading $fixture"
                poetry run python manage.py loaddata "$fixture" --database admin
              fi
            done
            echo "All database fixtures loaded successfully!"
          '
      - name: Setup Node.js environment
        uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
        with:
          node-version: '24.13.0'
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
          run_install: false
      - name: Get pnpm store directory
        shell: bash
        run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
      - name: Setup pnpm and Next.js cache
        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        with:
          path: |
            ${{ env.STORE_PATH }}
            ./ui/node_modules
            ./ui/.next/cache
          key: ${{ runner.os }}-pnpm-nextjs-${{ hashFiles('ui/pnpm-lock.yaml') }}-${{ hashFiles('ui/**/*.ts', 'ui/**/*.tsx', 'ui/**/*.js', 'ui/**/*.jsx') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-nextjs-${{ hashFiles('ui/pnpm-lock.yaml') }}-
            ${{ runner.os }}-pnpm-nextjs-
      - name: Install UI dependencies
        working-directory: ./ui
        run: pnpm install --frozen-lockfile --prefer-offline
      - name: Build UI application
        working-directory: ./ui
        run: pnpm run build
      - name: Cache Playwright browsers
        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        id: playwright-cache
        with:
          path: ~/.cache/ms-playwright
          key: ${{ runner.os }}-playwright-${{ hashFiles('ui/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-playwright-
      - name: Install Playwright browsers
        working-directory: ./ui
        if: steps.playwright-cache.outputs.cache-hit != 'true'
        run: pnpm run test:e2e:install
      - name: Run E2E tests
        working-directory: ./ui
        run: pnpm run test:e2e
      - name: Upload test reports
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        if: failure()
        with:
          name: playwright-report
          path: ui/playwright-report/
          retention-days: 30
      - name: Cleanup services
        if: always()
        run: |
          echo "Shutting down services..."
          docker compose down -v || true
          echo "Cleanup completed"
```

Root agent skills tables (file name not captured in this view): rows added for the new `prowler-attack-paths-query` skill:

```diff
@@ -44,6 +44,7 @@ Use these skills for detailed patterns on-demand:
 | `prowler-commit` | Professional commits (conventional-commits) | [SKILL.md](skills/prowler-commit/SKILL.md) |
 | `prowler-pr` | Pull request conventions | [SKILL.md](skills/prowler-pr/SKILL.md) |
 | `prowler-docs` | Documentation style guide | [SKILL.md](skills/prowler-docs/SKILL.md) |
+| `prowler-attack-paths-query` | Create Attack Paths openCypher queries | [SKILL.md](skills/prowler-attack-paths-query/SKILL.md) |
 | `skill-creator` | Create new AI agent skills | [SKILL.md](skills/skill-creator/SKILL.md) |

 ### Auto-invoke Skills
@@ -56,6 +57,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
 | Adding DRF pagination or permissions | `django-drf` |
 | Adding new providers | `prowler-provider` |
 | Adding services to existing providers | `prowler-provider` |
+| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
 | After creating/modifying a skill | `skill-sync` |
 | App Router / Server Actions | `nextjs-15` |
 | Building AI chat features | `ai-sdk-5` |
@@ -63,6 +65,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
 | Create PR that requires changelog entry | `prowler-changelog` |
 | Create a PR with gh pr create | `prowler-pr` |
 | Creating API endpoints | `jsonapi` |
+| Creating Attack Paths queries | `prowler-attack-paths-query` |
 | Creating ViewSets, serializers, or filters in api/ | `django-drf` |
 | Creating Zod schemas | `zod-4` |
 | Creating a git commit | `prowler-commit` |
@@ -92,6 +95,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
 | Understand changelog gate and no-changelog label behavior | `prowler-ci` |
 | Understand review ownership with CODEOWNERS | `prowler-pr` |
 | Update CHANGELOG.md in any component | `prowler-changelog` |
+| Updating existing Attack Paths queries | `prowler-attack-paths-query` |
 | Updating existing checks and metadata | `prowler-sdk-check` |
 | Using Zustand stores | `zustand-5` |
 | Working on MCP server tools | `prowler-mcp` |
```

API-level skills reference (file name not captured in this view):

```diff
@@ -3,6 +3,7 @@
 > **Skills Reference**: For detailed patterns, use these skills:
 > - [`prowler-api`](../skills/prowler-api/SKILL.md) - Models, Serializers, Views, RLS patterns
 > - [`prowler-test-api`](../skills/prowler-test-api/SKILL.md) - Testing patterns (pytest-django)
+> - [`prowler-attack-paths-query`](../skills/prowler-attack-paths-query/SKILL.md) - Attack Paths openCypher queries
 > - [`django-drf`](../skills/django-drf/SKILL.md) - Generic DRF patterns
 > - [`jsonapi`](../skills/jsonapi/SKILL.md) - Strict JSON:API v1.1 spec compliance
 > - [`pytest`](../skills/pytest/SKILL.md) - Generic pytest patterns
@@ -15,9 +16,11 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
 |--------|-------|
 | Add changelog entry for a PR or feature | `prowler-changelog` |
 | Adding DRF pagination or permissions | `django-drf` |
+| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
 | Committing changes | `prowler-commit` |
 | Create PR that requires changelog entry | `prowler-changelog` |
 | Creating API endpoints | `jsonapi` |
+| Creating Attack Paths queries | `prowler-attack-paths-query` |
 | Creating ViewSets, serializers, or filters in api/ | `django-drf` |
 | Creating a git commit | `prowler-commit` |
 | Creating/modifying models, views, serializers | `prowler-api` |
@@ -27,6 +30,7 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
 | Reviewing JSON:API compliance | `jsonapi` |
 | Testing RLS tenant isolation | `prowler-test-api` |
 | Update CHANGELOG.md in any component | `prowler-changelog` |
+| Updating existing Attack Paths queries | `prowler-attack-paths-query` |
 | Writing Prowler API tests | `prowler-test-api` |
 | Writing Python tests with pytest | `pytest` |
```

API `pyproject.toml`:

```diff
@@ -49,7 +49,7 @@ name = "prowler-api"
 package-mode = false
 # Needed for the SDK compatibility
 requires-python = ">=3.11,<3.13"
-version = "1.19.0"
+version = "1.20.0"

 [project.scripts]
 celery = "src.backend.config.settings.celery"
```

API OpenAPI specification:

```diff
@@ -1,7 +1,7 @@
 openapi: 3.0.3
 info:
   title: Prowler API
-  version: 1.19.0
+  version: 1.20.0
   description: |-
     Prowler API specification.
```

API schema view:

```diff
@@ -392,7 +392,7 @@ class SchemaView(SpectacularAPIView):

     def get(self, request, *args, **kwargs):
         spectacular_settings.TITLE = "Prowler API"
-        spectacular_settings.VERSION = "1.19.0"
+        spectacular_settings.VERSION = "1.20.0"
         spectacular_settings.DESCRIPTION = (
             "Prowler API specification.\n\nThis file is auto-generated."
         )
```

Docs: environment file version bump:

````diff
@@ -115,8 +115,8 @@ To update the environment file:
 Edit the `.env` file and change version values:

 ```env
-PROWLER_UI_VERSION="5.17.0"
-PROWLER_API_VERSION="5.17.0"
+PROWLER_UI_VERSION="5.18.0"
+PROWLER_API_VERSION="5.18.0"
 ```

 <Note>
````

Docs: Troubleshooting, new section added (`@@ -86,3 +86,81 @@` after `docker compose up -d`):

<Note>
We are evaluating adding these values to the default `docker-compose.yml` to avoid this issue in future releases.
</Note>

### API Container Fails to Start with JWT Key Permission Error

See [GitHub Issue #8897](https://github.com/prowler-cloud/prowler/issues/8897) for more details.

When deploying Prowler via Docker Compose on a fresh installation, the API container may fail to start with permission errors related to JWT RSA key file generation. This issue is commonly observed on Linux systems (Ubuntu, Debian, cloud VMs) and on Windows with Docker Desktop, but not typically on macOS.

**Error Message:**

Checking the API container logs reveals:

```bash
PermissionError: [Errno 13] Permission denied: '/home/prowler/.config/prowler-api/jwt_private.pem'
```

Or:

```bash
Token generation failed due to invalid key configuration. Provide valid DJANGO_TOKEN_SIGNING_KEY and DJANGO_TOKEN_VERIFYING_KEY in the environment.
```

**Root Cause:**

This permission mismatch occurs because of UID (User ID) mapping between the host system and Docker containers:

* The API container runs as user `prowler` with UID/GID 1000
* In environments like WSL2, the host user may have a different UID than the container user
* Docker creates the mounted volume directory `./_data/api` on the host, often with the host user's UID or root ownership (UID 0)
* When the application attempts to write the JWT key files (`jwt_private.pem` and `jwt_public.pem`), the operation fails because the container's UID 1000 does not have write permissions to the host-owned directory
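To confirm the mismatch on a given host, compare the host user's UID with the ownership Docker gave the volume directory. A quick check with standard coreutils (a diagnostic sketch, not part of the original issue report):

```bash
# UID of the current host user (1000 on many single-user Linux installs, but not guaranteed)
id -u

# Numeric owner and group of the mounted volume directory created by Docker
ls -lnd ./_data/api
```

If the two numbers differ, or the directory is owned by root, the container's `prowler` user (UID 1000) cannot write the key files.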
**Solutions:**

There are two approaches to resolve this issue:

**Option 1: Fix Volume Ownership (Resolve UID Mapping)**

Change the ownership of the volume directory to match the container user's UID (1000):

```bash
# The container user 'prowler' has UID 1000
# This command changes the directory ownership to UID 1000
sudo chown -R 1000:1000 ./_data/api
```

Then start Docker Compose:

```bash
docker compose up -d
```

This solution directly addresses the UID mapping mismatch by ensuring the volume directory is owned by the same UID that the container process uses.

**Option 2: Use Environment Variables (Skip File Storage)**

Generate the JWT RSA keys manually and provide them via environment variables to bypass file-based key storage entirely:

```bash
# Generate RSA keys
openssl genrsa -out jwt_private.pem 4096
openssl rsa -in jwt_private.pem -pubout -out jwt_public.pem

# Extract key content (strips carriage returns and escapes newlines)
PRIVATE_KEY=$(awk 'NF {sub(/\r/, ""); printf "%s\\n",$0;}' jwt_private.pem)
PUBLIC_KEY=$(awk 'NF {sub(/\r/, ""); printf "%s\\n",$0;}' jwt_public.pem)
```

Add the following to the `.env` file:

```env
DJANGO_TOKEN_SIGNING_KEY=<content of jwt_private.pem>
DJANGO_TOKEN_VERIFYING_KEY=<content of jwt_public.pem>
```

When these environment variables are set, the API will use them directly instead of attempting to write key files to the mounted volume.
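One possible way to wire the extracted values into `.env` straight from the shell, building on the `PRIVATE_KEY`/`PUBLIC_KEY` variables captured above (a sketch; quoting may need adjusting for your compose setup):

```bash
# Append the single-line, \n-escaped keys to the .env file
echo "DJANGO_TOKEN_SIGNING_KEY=\"${PRIVATE_KEY}\"" >> .env
echo "DJANGO_TOKEN_VERIFYING_KEY=\"${PUBLIC_KEY}\"" >> .env
```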
<Note>
A fix addressing this permission issue is being evaluated in [PR #9953](https://github.com/prowler-cloud/prowler/pull/9953).
</Note>

SDK `CHANGELOG.md`:

```diff
@@ -2,6 +2,18 @@

 All notable changes to the **Prowler SDK** are documented in this file.

+## [5.19.0] (Prowler UNRELEASED)
+
+### 🚀 Added
+
+- AI Skills: Added a skill for creating new Attack Paths queries in openCypher, compatible with Neo4j and Neptune [(#9975)](https://github.com/prowler-cloud/prowler/pull/9975)
+
+### 🔄 Changed
+
+- Parallelize Cloudflare zone API calls with threading to improve scan performance [(#9982)](https://github.com/prowler-cloud/prowler/pull/9982)
+
+---
+
 ## [5.18.0] (Prowler v5.18.0)

 ### 🚀 Added
```

SDK config module:

```diff
@@ -38,7 +38,7 @@ class _MutableTimestamp:

 timestamp = _MutableTimestamp(datetime.today())
 timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
-prowler_version = "5.18.0"
+prowler_version = "5.19.0"
 html_logo_url = "https://github.com/prowler-cloud/prowler/"
 square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
 aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
```

Cloudflare service base class: a shared thread pool and `__threading_call__` helper are added:

```diff
@@ -1,5 +1,9 @@
+from concurrent.futures import ThreadPoolExecutor, as_completed
+
 from prowler.providers.cloudflare.cloudflare_provider import CloudflareProvider

+MAX_WORKERS = 10
+

 class CloudflareService:
     """Base class for Cloudflare services to share provider context."""
@@ -10,3 +14,23 @@ class CloudflareService:
         self.audit_config = provider.audit_config
         self.fixer_config = provider.fixer_config
         self.service = service.lower() if not service.islower() else service
+
+        # Thread pool for __threading_call__
+        self.thread_pool = ThreadPoolExecutor(max_workers=MAX_WORKERS)
+
+    def __threading_call__(self, call, iterator):
+        """Execute a function across multiple items using threading."""
+        items = list(iterator) if not isinstance(iterator, list) else iterator
+
+        futures = {self.thread_pool.submit(call, item): item for item in items}
+        results = []
+
+        for future in as_completed(futures):
+            try:
+                result = future.result()
+                if result is not None:
+                    results.append(result)
+            except Exception:
+                pass
+
+        return results
```

Cloudflare `Zone` service: per-zone enrichment moves from sequential loops to `__threading_call__`:

```diff
@@ -49,13 +49,13 @@ class Zone(CloudflareService):
         super().__init__(__class__.__name__, provider)
         self.zones: dict[str, "CloudflareZone"] = {}
         self._list_zones()
-        self._get_zones_settings()
-        self._get_zones_dnssec()
-        self._get_zones_universal_ssl()
-        self._get_zones_rate_limit_rules()
-        self._get_zones_bot_management()
-        self._get_zones_firewall_rules()
-        self._get_zones_waf_rulesets()
+        self.__threading_call__(self._get_zone_settings_threaded, self.zones.values())
+        self.__threading_call__(self._get_zone_dnssec, self.zones.values())
+        self.__threading_call__(self._get_zone_universal_ssl, self.zones.values())
+        self.__threading_call__(self._get_zone_rate_limit_rules, self.zones.values())
+        self.__threading_call__(self._get_zone_bot_management, self.zones.values())
+        self.__threading_call__(self._get_zone_firewall_rules, self.zones.values())
+        self.__threading_call__(self._get_zone_waf_rulesets, self.zones.values())

     def _list_zones(self) -> None:
         """List all Cloudflare zones with their basic information."""
@@ -124,110 +124,89 @@ class Zone(CloudflareService):
                 f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
             )

-    def _get_zones_settings(self) -> None:
-        """Get settings for all zones."""
-        logger.info("Zone - Getting zone settings...")
-        for zone in self.zones.values():
-            try:
-                zone.settings = self._get_zone_settings(zone.id)
-            except Exception as error:
-                logger.error(
-                    f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-                )
+    def _get_zone_settings_threaded(self, zone: "CloudflareZone") -> None:
+        """Get settings for a single zone (thread-safe)."""
+        try:
+            zone.settings = self._get_zone_settings(zone.id)
+        except Exception as error:
+            logger.error(
+                f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+            )

-    def _get_zones_dnssec(self) -> None:
-        """Get DNSSEC status for all zones."""
-        logger.info("Zone - Getting DNSSEC status...")
-        for zone in self.zones.values():
-            try:
-                dnssec = self.client.dns.dnssec.get(zone_id=zone.id)
-                zone.dnssec_status = getattr(dnssec, "status", None)
-            except Exception as error:
-                logger.error(
-                    f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-                )
+    def _get_zone_dnssec(self, zone: "CloudflareZone") -> None:
+        """Get DNSSEC status for a single zone."""
+        try:
+            dnssec = self.client.dns.dnssec.get(zone_id=zone.id)
+            zone.dnssec_status = getattr(dnssec, "status", None)
+        except Exception as error:
+            logger.error(
+                f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+            )

-    def _get_zones_universal_ssl(self) -> None:
-        """Get Universal SSL settings for all zones."""
-        logger.info("Zone - Getting Universal SSL settings...")
-        for zone in self.zones.values():
-            try:
-                universal_ssl = self.client.ssl.universal.settings.get(zone_id=zone.id)
-                zone.settings.universal_ssl_enabled = getattr(
-                    universal_ssl, "enabled", False
-                )
-            except Exception as error:
-                logger.error(
-                    f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-                )
+    def _get_zone_universal_ssl(self, zone: "CloudflareZone") -> None:
+        """Get Universal SSL settings for a single zone."""
+        try:
+            universal_ssl = self.client.ssl.universal.settings.get(zone_id=zone.id)
+            zone.settings.universal_ssl_enabled = getattr(
+                universal_ssl, "enabled", False
+            )
+        except Exception as error:
+            logger.error(
+                f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+            )

-    def _get_zones_rate_limit_rules(self) -> None:
-        """Get rate limiting rules for all zones."""
-        logger.info("Zone - Getting rate limit rules...")
-        for zone in self.zones.values():
-            try:
-                seen_ruleset_ids: set[str] = set()
-                for ruleset in self.client.rulesets.list(zone_id=zone.id):
-                    ruleset_id = getattr(ruleset, "id", "")
-                    if ruleset_id in seen_ruleset_ids:
-                        break
-                    seen_ruleset_ids.add(ruleset_id)
-
-                    phase = getattr(ruleset, "phase", "")
-                    if phase == "http_ratelimit":
-                        try:
-                            ruleset_detail = self.client.rulesets.get(
-                                ruleset_id=ruleset_id, zone_id=zone.id
-                            )
-                            rules = getattr(ruleset_detail, "rules", []) or []
-                            seen_rule_ids: set[str] = set()
-                            for rule in rules:
-                                rule_id = getattr(rule, "id", "")
-                                if rule_id in seen_rule_ids:
-                                    break
-                                seen_rule_ids.add(rule_id)
-                                zone.rate_limit_rules.append(
-                                    CloudflareRateLimitRule(
-                                        id=rule_id,
-                                        description=getattr(rule, "description", None),
-                                        action=getattr(rule, "action", None),
-                                        enabled=getattr(rule, "enabled", True),
-                                        expression=getattr(rule, "expression", None),
-                                    )
-                                )
-                        except Exception as error:
-                            logger.debug(
-                                f"{zone.id} ruleset {ruleset_id} -- {error.__class__.__name__}: {error}"
-                            )
-            except Exception as error:
-                logger.error(
-                    f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-                )
+    def _get_zone_rate_limit_rules(self, zone: "CloudflareZone") -> None:
+        """Get rate limiting rules for a single zone."""
+        try:
+            seen_ruleset_ids: set[str] = set()
+            for ruleset in self.client.rulesets.list(zone_id=zone.id):
+                ruleset_id = getattr(ruleset, "id", "")
+                if ruleset_id in seen_ruleset_ids:
+                    break
+                seen_ruleset_ids.add(ruleset_id)
+
+                phase = getattr(ruleset, "phase", "")
+                if phase == "http_ratelimit":
+                    try:
+                        ruleset_detail = self.client.rulesets.get(
+                            ruleset_id=ruleset_id, zone_id=zone.id
+                        )
+                        rules = getattr(ruleset_detail, "rules", []) or []
+                        seen_rule_ids: set[str] = set()
+                        for rule in rules:
+                            rule_id = getattr(rule, "id", "")
+                            if rule_id in seen_rule_ids:
+                                break
+                            seen_rule_ids.add(rule_id)
+                            zone.rate_limit_rules.append(
+                                CloudflareRateLimitRule(
+                                    id=rule_id,
+                                    description=getattr(rule, "description", None),
+                                    action=getattr(rule, "action", None),
+                                    enabled=getattr(rule, "enabled", True),
+                                    expression=getattr(rule, "expression", None),
+                                )
+                            )
+                    except Exception as error:
+                        logger.debug(
+                            f"{zone.id} ruleset {ruleset_id} -- {error.__class__.__name__}: {error}"
+                        )
+        except Exception as error:
+            logger.error(
+                f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+            )

-    def _get_zones_bot_management(self) -> None:
-        """Get Bot Management settings for all zones."""
-        logger.info("Zone - Getting Bot Management settings...")
-        for zone in self.zones.values():
-            try:
-                bot_management = self.client.bot_management.get(zone_id=zone.id)
-                zone.settings.bot_fight_mode_enabled = getattr(
-                    bot_management, "fight_mode", False
-                )
-            except Exception as error:
-                logger.error(
-                    f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-                )
-
-    def _get_zones_firewall_rules(self) -> None:
-        """Get firewall rules for all zones."""
-        logger.info("Zone - Getting firewall rules...")
-        for zone in self.zones.values():
-            try:
-                self._get_zone_firewall_rules(zone)
-            except Exception as error:
-                logger.error(
-                    f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-                )
+    def _get_zone_bot_management(self, zone: "CloudflareZone") -> None:
+        """Get Bot Management settings for a single zone."""
+        try:
+            bot_management = self.client.bot_management.get(zone_id=zone.id)
+            zone.settings.bot_fight_mode_enabled = getattr(
+                bot_management, "fight_mode", False
+            )
+        except Exception as error:
+            logger.error(
+                f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+            )

     def _get_zone_firewall_rules(self, zone: "CloudflareZone") -> None:
         """List firewall rules from custom rulesets for a zone."""
@@ -282,17 +261,6 @@ class Zone(CloudflareService):
                 f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
             )

-    def _get_zones_waf_rulesets(self) -> None:
-        """Get WAF rulesets for all zones."""
-        logger.info("Zone - Getting WAF rulesets...")
-        for zone in self.zones.values():
-            try:
-                self._get_zone_waf_rulesets(zone)
-            except Exception as error:
-                logger.error(
-                    f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-                )
-
     def _get_zone_waf_rulesets(self, zone: "CloudflareZone") -> None:
         """List WAF rulesets for a zone using the rulesets API."""
         seen_ids: set[str] = set()
```

SDK `pyproject.toml`:

```diff
@@ -92,7 +92,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
 name = "prowler"
 readme = "README.md"
 requires-python = ">3.9.1,<3.13"
-version = "5.18.0"
+version = "5.19.0"

 [project.scripts]
 prowler = "prowler.__main__:prowler"
```

Skills index (file name not captured in this view):

```diff
@@ -77,6 +77,7 @@ Patterns tailored for Prowler development:
 | `prowler-provider` | Add new cloud providers |
 | `prowler-pr` | Pull request conventions |
 | `prowler-docs` | Documentation style guide |
+| `prowler-attack-paths-query` | Create Attack Paths openCypher queries |

 ### Meta Skills
```

`skills/prowler-attack-paths-query/SKILL.md` (new file, 479 lines):

---
name: prowler-attack-paths-query
description: >
  Creates Prowler Attack Paths openCypher queries for graph analysis (compatible with Neo4j and Neptune).
  Trigger: When creating or updating Attack Paths queries that detect privilege escalation paths,
  network exposure, or security misconfigurations in cloud environments.
license: Apache-2.0
metadata:
  author: prowler-cloud
  version: "1.0"
scope: [root, api]
auto_invoke:
  - "Creating Attack Paths queries"
  - "Updating existing Attack Paths queries"
  - "Adding privilege escalation detection queries"
allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, Task
---

## Overview

Attack Paths queries are openCypher queries that analyze cloud infrastructure graphs
(ingested via Cartography) to detect security risks like privilege escalation paths,
network exposure, and misconfigurations.

Queries are written in **openCypher Version 9** to ensure compatibility with both Neo4j and Amazon Neptune.

---

## Input Sources

Queries can be created from:

1. **pathfinding.cloud ID** (e.g., `ECS-001`, `GLUE-001`)
   - The JSON index contains: `id`, `name`, `description`, `services`, `permissions`, `exploitationSteps`, `prerequisites`, etc.
   - Reference: https://github.com/DataDog/pathfinding.cloud

   **Fetching a single path by ID** — The aggregated `paths.json` is too large for WebFetch
   (content gets truncated). Use Bash with `curl` and a JSON parser instead.
   Prefer `jq` (concise); fall back to `python3` (guaranteed in this Python project):

   ```bash
   # With jq
   curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
     | jq '.[] | select(.id == "ecs-002")'

   # With python3 (fallback)
   curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
     | python3 -c "import json,sys; print(json.dumps(next((p for p in json.load(sys.stdin) if p['id']=='ecs-002'), None), indent=2))"
   ```

2. **Listing Available Attack Paths**
   - Use Bash to list available paths from the JSON index:

   ```bash
   # List all path IDs and names (jq)
   curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
     | jq -r '.[] | "\(.id): \(.name)"'

   # List all path IDs and names (python3 fallback)
   curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
     | python3 -c "import json,sys; [print(f\"{p['id']}: {p['name']}\") for p in json.load(sys.stdin)]"

   # List paths filtered by service prefix
   curl -s https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json \
     | jq -r '.[] | select(.id | startswith("ecs")) | "\(.id): \(.name)"'
   ```

3. **Natural Language Description**
   - User describes the Attack Paths in plain language
   - Agent maps it to appropriate openCypher patterns

---

## Query Structure

### File Location

```
api/src/backend/api/attack_paths/queries/{provider}.py
```

Example: `api/src/backend/api/attack_paths/queries/aws.py`

### Query Definition Pattern

```python
from api.attack_paths.queries.types import (
    AttackPathsQueryDefinition,
    AttackPathsQueryParameterDefinition,
)
from tasks.jobs.attack_paths.config import PROWLER_FINDING_LABEL

# {REFERENCE_ID} (e.g., EC2-001, GLUE-001)
AWS_{QUERY_NAME} = AttackPathsQueryDefinition(
    id="aws-{kebab-case-name}",
    name="Privilege Escalation: {permission1} + {permission2}",
    description="{Detailed description of the Attack Paths}.",
    provider="aws",
    cypher=f"""
        // Find principals with {permission1}
        MATCH path_principal = (aws:AWSAccount {{id: $provider_uid}})--(principal:AWSPrincipal)--(policy:AWSPolicy)--(stmt:AWSPolicyStatement)
        WHERE stmt.effect = 'Allow'
          AND any(action IN stmt.action WHERE
                toLower(action) = '{permission1_lowercase}'
                OR toLower(action) = '{service}:*'
                OR action = '*'
          )

        // Find {permission2}
        MATCH (principal)--(policy2:AWSPolicy)--(stmt2:AWSPolicyStatement)
        WHERE stmt2.effect = 'Allow'
          AND any(action IN stmt2.action WHERE
                toLower(action) = '{permission2_lowercase}'
                OR toLower(action) = '{service2}:*'
                OR action = '*'
          )

        // Find target resources
        MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: '{service}.amazonaws.com'}})
        WHERE any(resource IN stmt.resource WHERE
                resource = '*'
                OR target_role.arn CONTAINS resource
                OR resource CONTAINS target_role.name
          )

        UNWIND nodes(path_principal) + nodes(path_target) as n
        OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {{status: 'FAIL', provider_uid: $provider_uid}})

        RETURN path_principal, path_target,
               collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
    """,
    parameters=[],
)
```

### Register in Query List

Add to the `{PROVIDER}_QUERIES` list at the bottom of the file:

```python
AWS_QUERIES: list[AttackPathsQueryDefinition] = [
    # ... existing queries ...
    AWS_{NEW_QUERY_NAME},  # Add here
]
```

---

## Step-by-Step Creation Process

### 1. Read the Queries Module

**FIRST**, read all files in the queries module to understand the structure:

```
api/src/backend/api/attack_paths/queries/
├── __init__.py      # Module exports
├── types.py         # AttackPathsQueryDefinition, AttackPathsQueryParameterDefinition
├── registry.py      # Query registry logic
└── {provider}.py    # Provider-specific queries (e.g., aws.py)
```

Read these files to learn:

- Type definitions and available fields
- How queries are registered
- Current query patterns, style, and naming conventions

### 2. Determine Schema Source

Check the Cartography dependency in `api/pyproject.toml`:

```bash
grep cartography api/pyproject.toml
```

Parse the dependency to determine the schema source:

**If git-based dependency** (e.g., `cartography @ git+https://github.com/prowler-cloud/cartography@0.126.1`):

- Extract the repository (e.g., `prowler-cloud/cartography`)
- Extract the version/tag (e.g., `0.126.1`)
- Fetch the schema from that repository at that tag

**If PyPI dependency** (e.g., `cartography = "^0.126.0"` or `cartography>=0.126.0`):

- Extract the version (e.g., `0.126.0`)
- Use the official `cartography-cncf` repository

**Schema URL patterns** (ALWAYS use the specific version tag, not master/main):

```
# Official Cartography (cartography-cncf)
https://raw.githubusercontent.com/cartography-cncf/cartography/refs/tags/{version}/docs/root/modules/{provider}/schema.md

# Prowler fork (prowler-cloud)
https://raw.githubusercontent.com/prowler-cloud/cartography/refs/tags/{version}/docs/root/modules/{provider}/schema.md
```

**Examples**:

```bash
# For prowler-cloud/cartography@0.126.1 (git), fetch the AWS schema from:
https://raw.githubusercontent.com/prowler-cloud/cartography/refs/tags/0.126.1/docs/root/modules/aws/schema.md

# For cartography = "^0.126.0" (PyPI), fetch the AWS schema from:
https://raw.githubusercontent.com/cartography-cncf/cartography/refs/tags/0.126.0/docs/root/modules/aws/schema.md
```

**IMPORTANT**: Always match the schema version to the dependency version in `pyproject.toml`. Using master/main may reference node labels or properties that don't exist in the deployed version.

**Additional Prowler Labels**: The Attack Paths sync task adds extra labels:

- `ProwlerFinding` - Prowler finding nodes with `status` and `provider_uid` properties
- `ProviderResource` - Generic resource marker
- `{Provider}Resource` - Provider-specific marker (e.g., `AWSResource`)

These are defined in `api/src/backend/tasks/jobs/attack_paths/config.py`.

### 3. Consult the Schema for Available Data

Use the Cartography schema to discover:

- What node labels exist for the target resources
- What properties are available on those nodes
- What relationships connect the nodes

This informs query design by showing what data is actually available to query.

### 4. Create Query Definition

Use the standard pattern (see above) with:

- **id**: Auto-generated as `{provider}-{kebab-case-description}`
- **name**: Human-readable, e.g., "Privilege Escalation: {perm1} + {perm2}"
- **description**: Explain the attack vector and impact
- **provider**: Provider identifier (aws, azure, gcp, kubernetes, github)
- **cypher**: The openCypher query with proper escaping
- **parameters**: Optional list of user-provided parameters (use `parameters=[]` if none are needed)

### 5. Add Query to Provider List

Add the constant to the `{PROVIDER}_QUERIES` list.

---

## Query Naming Conventions

### Query ID

```
{provider}-{category}-{description}
```

Examples:

- `aws-ec2-privesc-passrole-iam`
- `aws-iam-privesc-attach-role-policy-assume-role`
- `aws-rds-unencrypted-storage`

### Query Constant Name

```
{PROVIDER}_{CATEGORY}_{DESCRIPTION}
```

Examples:

- `AWS_EC2_PRIVESC_PASSROLE_IAM`
- `AWS_IAM_PRIVESC_ATTACH_ROLE_POLICY_ASSUME_ROLE`
- `AWS_RDS_UNENCRYPTED_STORAGE`

---

## Query Categories

| Category             | Description                    | Example                   |
| -------------------- | ------------------------------ | ------------------------- |
| Basic Resource       | List resources with properties | RDS instances, S3 buckets |
| Network Exposure     | Internet-exposed resources     | EC2 with public IPs       |
| Privilege Escalation | IAM privilege escalation paths | PassRole + RunInstances   |
| Data Access          | Access to sensitive data       | EC2 with S3 access        |

---

## Common openCypher Patterns

### Match Account and Principal

```cypher
MATCH path_principal = (aws:AWSAccount {id: $provider_uid})--(principal:AWSPrincipal)--(policy:AWSPolicy)--(stmt:AWSPolicyStatement)
```

### Check IAM Action Permissions

```cypher
WHERE stmt.effect = 'Allow'
  AND any(action IN stmt.action WHERE
        toLower(action) = 'iam:passrole'
        OR toLower(action) = 'iam:*'
        OR action = '*'
  )
```

### Find Roles Trusting a Service

```cypher
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {arn: 'ec2.amazonaws.com'})
```

### Check Resource Scope

```cypher
WHERE any(resource IN stmt.resource WHERE
        resource = '*'
        OR target_role.arn CONTAINS resource
        OR resource CONTAINS target_role.name
  )
```

### Include Prowler Findings

```cypher
UNWIND nodes(path_principal) + nodes(path_target) as n
OPTIONAL MATCH (n)-[pfr]-(pf:{PROWLER_FINDING_LABEL} {status: 'FAIL', provider_uid: $provider_uid})

RETURN path_principal, path_target,
       collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr
```

---

## Common Node Labels by Provider

### AWS

| Label                | Description                         |
| -------------------- | ----------------------------------- |
| `AWSAccount`         | AWS account root                    |
| `AWSPrincipal`       | IAM principal (user, role, service) |
| `AWSRole`            | IAM role                            |
| `AWSUser`            | IAM user                            |
| `AWSPolicy`          | IAM policy                          |
| `AWSPolicyStatement` | Policy statement                    |
| `EC2Instance`        | EC2 instance                        |
| `EC2SecurityGroup`   | Security group                      |
| `S3Bucket`           | S3 bucket                           |
| `RDSInstance`        | RDS database instance               |
| `LoadBalancer`       | Classic ELB                         |
| `LoadBalancerV2`     | ALB/NLB                             |
| `LaunchTemplate`     | EC2 launch template                 |

### Common Relationships

| Relationship           | Description             |
| ---------------------- | ----------------------- |
| `TRUSTS_AWS_PRINCIPAL` | Role trust relationship |
| `STS_ASSUMEROLE_ALLOW` | Can assume role         |
| `POLICY`               | Has policy attached     |
| `STATEMENT`            | Policy has statement    |

---

## Parameters

For queries requiring user input, define parameters:

```python
parameters=[
    AttackPathsQueryParameterDefinition(
        name="ip",
        label="IP address",
        description="Public IP address, e.g. 192.0.2.0.",
        placeholder="192.0.2.0",
    ),
    AttackPathsQueryParameterDefinition(
        name="tag_key",
        label="Tag key",
        description="Tag key to filter resources.",
        placeholder="Environment",
    ),
],
```

---

## Best Practices

1. **Always filter by provider_uid**: Use `{id: $provider_uid}` on account nodes and `{provider_uid: $provider_uid}` on ProwlerFinding nodes
2. **Use consistent naming**: Follow existing patterns in the file
3. **Include Prowler findings**: Always add the OPTIONAL MATCH for ProwlerFinding nodes
4. **Return distinct findings**: Use `collect(DISTINCT pf)` to avoid duplicates
5. **Comment the query purpose**: Add inline comments explaining each MATCH clause
6. **Validate the schema first**: Ensure all node labels and properties exist in the Cartography schema

---

## openCypher Compatibility

Queries must be written in **openCypher Version 9** to ensure compatibility with both Neo4j and Amazon Neptune.

> **Why Version 9?** Amazon Neptune implements openCypher Version 9. By targeting this specification, queries work on both Neo4j and Neptune without modification.

### Avoid These (Not in openCypher spec)

| Feature                                             | Reason                                          |
| --------------------------------------------------- | ----------------------------------------------- |
| APOC procedures (`apoc.*`)                          | Neo4j-specific plugin, not available in Neptune |
| Virtual nodes (`apoc.create.vNode`)                 | APOC-specific                                   |
| Virtual relationships (`apoc.create.vRelationship`) | APOC-specific                                   |
| Neptune extensions                                  | Not available in Neo4j                          |
| `reduce()` function                                 | Use `UNWIND` + aggregation instead              |
| `FOREACH` clause                                    | Use `WITH` + `UNWIND` + `SET` instead           |
| Regex match operator (`=~`)                         | Not supported in Neptune                        |
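As a concrete illustration of the `reduce()` rewrite, a sum over a list property can be expressed with `UNWIND` plus an aggregation. This sketch uses a hypothetical `sizes` list property on `S3Bucket`, shown only to demonstrate the portable shape:

```cypher
// Portable alternative to reduce(): unwind the list, then aggregate.
// `sizes` is a hypothetical list property used for illustration.
MATCH (b:S3Bucket)
UNWIND b.sizes AS s
RETURN b.name, sum(s) AS total_size
```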
### CALL Subqueries

Supported with limitations:

- Use a `WITH` clause to import variables: `CALL { WITH var ... }`
- Updates inside CALL subqueries are NOT supported
- Emitted variables cannot overlap with variables defined before the CALL

A minimal shape that respects these constraints is sketched below.
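This sketch (using node labels from the tables above) imports one outer variable, performs a read-only match, and emits a fresh variable name:

```cypher
// Read-only CALL subquery: `acct` is imported with WITH,
// and the emitted `role_count` does not clash with outer names.
MATCH (acct:AWSAccount {id: $provider_uid})
CALL {
  WITH acct
  MATCH (acct)--(r:AWSRole)
  RETURN count(r) AS role_count
}
RETURN acct.id, role_count
```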
---

## Reference

### pathfinding.cloud (Attack Path Definitions)

- **Repository**: https://github.com/DataDog/pathfinding.cloud
- **All paths JSON**: `https://raw.githubusercontent.com/DataDog/pathfinding.cloud/main/docs/paths.json`
- Query specific paths or list available services with `curl` plus `jq`/`python3` as shown in Input Sources (the aggregated file is too large for WebFetch)

### Cartography Schema

- **URL pattern**: `https://raw.githubusercontent.com/{org}/cartography/refs/tags/{version}/docs/root/modules/{provider}/schema.md`
- Always use the version from `api/pyproject.toml`, not master/main

### openCypher Specification

- **Neptune openCypher compliance** (what Neptune supports): https://docs.aws.amazon.com/neptune/latest/userguide/feature-opencypher-compliance.html
- **Rewriting Cypher for Neptune** (converting Neo4j-specific syntax): https://docs.aws.amazon.com/neptune/latest/userguide/migration-opencypher-rewrites.html
- **openCypher project** (spec, grammar, TCK): https://github.com/opencypher/openCypher

---

## Learning from the Queries Module

**IMPORTANT**: Before creating a new query, ALWAYS read the entire queries module:

```
api/src/backend/api/attack_paths/queries/
├── __init__.py      # Module exports
├── types.py         # Type definitions
├── registry.py      # Registry logic
└── {provider}.py    # Provider queries (aws.py, etc.)
```

Use the existing queries to learn:

- Query structure and formatting
- Variable naming conventions
- How to include Prowler findings
- Comment style

> **Compatibility Warning**: Some existing queries use Neo4j-specific features
> (e.g., `apoc.create.vNode`, `apoc.create.vRelationship`, regex `=~`) that are
> **NOT compatible** with Amazon Neptune. Use these queries to learn general
> patterns (structure, naming, Prowler findings integration, comment style) but
> **DO NOT copy APOC procedures or other Neo4j-specific syntax** into new queries.
> New queries must be pure openCypher Version 9. Refer to the
> [openCypher Compatibility](#opencypher-compatibility) section for the full list
> of features to avoid.

**DO NOT** use generic templates. Match the exact style of existing **compatible** queries in the file.

UI `CHANGELOG.md`:

```diff
@@ -2,7 +2,15 @@

 All notable changes to the **Prowler UI** are documented in this file.

-## [1.18.0] (Prowler UNRELEASED)
+## [1.18.1] (Prowler UNRELEASED)
+
+### 🐞 Fixed
+
+- Scans page polling now refreshes only the scan table data instead of re-rendering the entire server component tree, eliminating redundant API calls to the providers, findings, and compliance endpoints every 5 seconds
+
+---
+
+## [1.18.0] (Prowler v5.18.0)

 ### 🔄 Changed
```

Scans page server component:

```diff
@@ -1,21 +1,19 @@
 import { Suspense } from "react";

 import { getAllProviders } from "@/actions/providers";
-import { getScans, getScansByState } from "@/actions/scans";
+import { getScans } from "@/actions/scans";
 import { auth } from "@/auth.config";
 import { MutedFindingsConfigButton } from "@/components/providers";
 import {
-  AutoRefresh,
   NoProvidersAdded,
   NoProvidersConnected,
   ScansFilters,
 } from "@/components/scans";
 import { LaunchScanWorkflow } from "@/components/scans/launch-workflow";
 import { SkeletonTableScans } from "@/components/scans/table";
-import { ColumnGetScans } from "@/components/scans/table/scans";
+import { ScansTableWithPolling } from "@/components/scans/table/scans";
 import { ContentLayout } from "@/components/ui";
 import { CustomBanner } from "@/components/ui/custom/custom-banner";
-import { DataTable } from "@/components/ui/table";
 import {
   createProviderDetailsMapping,
   extractProviderUIDs,
@@ -57,15 +55,6 @@ export default async function Scans({

   const hasManageScansPermission = session?.user?.permissions?.manage_scans;

-  // Get scans data to check for executing scans
-  const scansData = await getScansByState();
-
-  const hasExecutingScan = scansData?.data?.some(
-    (scan: ScanProps) =>
-      scan.attributes.state === "executing" ||
-      scan.attributes.state === "available",
-  );
-
   // Extract provider UIDs and create provider details mapping for filtering
   const providerUIDs = providersData ? extractProviderUIDs(providersData) : [];
   const providerDetails = providersData
@@ -82,7 +71,6 @@

   return (
     <ContentLayout title="Scans" icon="lucide:timer">
-      <AutoRefresh hasExecutingScan={hasExecutingScan} />
       <>
         <>
           {!hasManageScansPermission ? (
@@ -177,11 +165,10 @@ const SSRDataTableScans = async ({
   }) || [];

   return (
-    <DataTable
-      key={`scans-${Date.now()}`}
-      columns={ColumnGetScans}
-      data={expandedScansData || []}
-      metadata={meta}
+    <ScansTableWithPolling
+      initialData={expandedScansData}
+      initialMeta={meta}
+      searchParams={searchParams}
     />
   );
 };
```

Launch scan workflow component:

```diff
@@ -13,6 +13,7 @@ import { Form } from "@/components/ui/form";
 import { toast } from "@/components/ui/toast";
 import { onDemandScanFormSchema } from "@/types";

+import { SCAN_LAUNCHED_EVENT } from "../table/scans/scans-table-with-polling";
 import { SelectScanProvider } from "./select-scan-provider";

 type ProviderInfo = {
@@ -85,6 +86,8 @@ export const LaunchScanWorkflow = ({
       });
       // Reset form after successful submission
       form.reset();
+      // Notify the scans table to refresh and pick up the new scan
+      window.dispatchEvent(new Event(SCAN_LAUNCHED_EVENT));
     }
   };
```

Scans table barrel export:

```diff
@@ -1,3 +1,4 @@
 export * from "./column-get-scans";
 export * from "./data-table-row-actions";
 export * from "./data-table-row-details";
+export * from "./scans-table-with-polling";
```

`ui/components/scans/table/scans/scans-table-with-polling.tsx` (new file, 126 lines):

```tsx
"use client";

import { useCallback, useEffect, useState } from "react";

import { getScans } from "@/actions/scans";
import { AutoRefresh } from "@/components/scans";
import { DataTable } from "@/components/ui/table";
import { MetaDataProps, ScanProps, SearchParamsProps } from "@/types";

import { ColumnGetScans } from "./column-get-scans";

export const SCAN_LAUNCHED_EVENT = "scan-launched";

interface ScansTableWithPollingProps {
  initialData: ScanProps[];
  initialMeta?: MetaDataProps;
  searchParams: SearchParamsProps;
}

const EXECUTING_STATES = ["executing", "available"] as const;

function expandScansWithProviderInfo(
  scans: ScanProps[],
  included?: Array<{ type: string; id: string; attributes: any }>,
) {
  return (
    scans?.map((scan) => {
      const providerId = scan.relationships?.provider?.data?.id;

      if (!providerId) {
        return { ...scan, providerInfo: undefined };
      }

      const providerData = included?.find(
        (item) => item.type === "providers" && item.id === providerId,
      );

      if (!providerData) {
        return { ...scan, providerInfo: undefined };
      }

      return {
        ...scan,
        providerInfo: {
          provider: providerData.attributes.provider,
          uid: providerData.attributes.uid,
          alias: providerData.attributes.alias,
        },
      };
    }) || []
  );
}

export function ScansTableWithPolling({
  initialData,
  initialMeta,
  searchParams,
}: ScansTableWithPollingProps) {
  const [scansData, setScansData] = useState<ScanProps[]>(initialData);
  const [meta, setMeta] = useState<MetaDataProps | undefined>(initialMeta);

  const hasExecutingScan = scansData.some((scan) =>
    EXECUTING_STATES.includes(
      scan.attributes.state as (typeof EXECUTING_STATES)[number],
    ),
  );

  const handleRefresh = useCallback(async () => {
    const page = parseInt(searchParams.page?.toString() || "1", 10);
    const pageSize = parseInt(searchParams.pageSize?.toString() || "10", 10);
    const sort = searchParams.sort?.toString();

    const filters = Object.fromEntries(
      Object.entries(searchParams).filter(
        ([key]) => key.startsWith("filter[") && key !== "scanId",
      ),
    );

    const query = (filters["filter[search]"] as string) || "";

    const result = await getScans({
      query,
      page,
      sort,
      filters,
      pageSize,
      include: "provider",
    });

    if (result?.data) {
      const expanded = expandScansWithProviderInfo(
        result.data,
        result.included,
      );
      setScansData(expanded);

      if (result && "meta" in result) {
        setMeta(result.meta as MetaDataProps);
      }
    }
  }, [searchParams]);

  // Listen for scan launch events to trigger an immediate refresh
  useEffect(() => {
    const handler = () => {
      handleRefresh();
    };
    window.addEventListener(SCAN_LAUNCHED_EVENT, handler);
    return () => window.removeEventListener(SCAN_LAUNCHED_EVENT, handler);
  }, [handleRefresh]);

  return (
    <>
      <AutoRefresh
        hasExecutingScan={hasExecutingScan}
        onRefresh={handleRefresh}
      />
      <DataTable
        key={`scans-${scansData.length}-${meta?.pagination?.page}`}
        columns={ColumnGetScans}
        data={scansData}
        metadata={meta}
      />
    </>
  );
}
```
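For context, any client component can trigger the same immediate refresh path by dispatching the exported event. A minimal sketch (the `RefreshScansButton` component is hypothetical, not part of this commit):

```tsx
"use client";

import { SCAN_LAUNCHED_EVENT } from "@/components/scans/table/scans/scans-table-with-polling";

// Hypothetical helper: forces the scans table to re-fetch immediately
export function RefreshScansButton() {
  return (
    <button onClick={() => window.dispatchEvent(new Event(SCAN_LAUNCHED_EVENT))}>
      Refresh scans
    </button>
  );
}
```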
Table link component (prefetching disabled on row links):

```diff
@@ -21,7 +21,9 @@ export const TableLink = ({ href, label, isDisabled }: TableLinkProps) => {

   return (
     <Button asChild variant="link" size="sm" className="text-xs">
-      <Link href={href}>{label}</Link>
+      <Link href={href} prefetch={false}>
+        {label}
+      </Link>
     </Button>
   );
 };
```

E2E page object (`ScansPage`):

```diff
@@ -105,6 +105,7 @@ export class ScansPage extends BasePage {
     await expect(this.scanTable).toBeVisible();

     // Find a row that contains the account ID (provider UID in Cloud Provider column)
+    // Note: Use a more specific locator strategy if possible in the future
     const rowWithAccountId = this.scanTable
       .locator("tbody tr")
       .filter({ hasText: accountId })
```