From 032499c29afffee682841d7834705c85a3b07bcf Mon Sep 17 00:00:00 2001 From: Josema Camacho Date: Fri, 16 Jan 2026 13:37:09 +0100 Subject: [PATCH] feat(attack-paths): The complete Attack Paths feature (#9805) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: dependabot[bot] Co-authored-by: César Arroba <19954079+cesararroba@users.noreply.github.com> Co-authored-by: Alan Buscaglia Co-authored-by: Claude Co-authored-by: Andoni Alonso <14891798+andoniaf@users.noreply.github.com> Co-authored-by: Rubén De la Torre Vico Co-authored-by: HugoPBrito Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com> Co-authored-by: Pepe Fagoaga Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> Co-authored-by: Chandrapal Badshah Co-authored-by: Chandrapal Badshah <12944530+Chan9390@users.noreply.github.com> Co-authored-by: Adrián Peña Co-authored-by: Pedro Martín Co-authored-by: KonstGolfi <73020281+KonstGolfi@users.noreply.github.com> Co-authored-by: lydiavilchez <114735608+lydiavilchez@users.noreply.github.com> Co-authored-by: Prowler Bot Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com> Co-authored-by: StylusFrost <43682773+StylusFrost@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: alejandrobailo Co-authored-by: Alejandro Bailo <59607668+alejandrobailo@users.noreply.github.com> Co-authored-by: Víctor Fernández Poyatos Co-authored-by: bota4go <108249054+bota4go@users.noreply.github.com> Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com> Co-authored-by: Daniel Barranquero Co-authored-by: mchennai <50082780+mchennai@users.noreply.github.com> Co-authored-by: Ryan Nolette Co-authored-by: Ulissis Correa <123517149+ulissisc@users.noreply.github.com> Co-authored-by: Sergio Garcia Co-authored-by: Lee Trout Co-authored-by: Sergio Garcia Co-authored-by: Alan-TheGentleman --- .env | 21 +- .../actions/setup-python-poetry/action.yml | 2 +- .github/workflows/api-security.yml | 4 +- .pre-commit-config.yaml | 3 +- README.md | 17 + api/CHANGELOG.md | 1 + api/poetry.lock | 1320 ++++++++++++++--- api/pyproject.toml | 2 + api/src/backend/api/apps.py | 8 +- api/src/backend/api/attack_paths/__init__.py | 13 + api/src/backend/api/attack_paths/database.py | 144 ++ .../api/attack_paths/query_definitions.py | 514 +++++++ .../api/attack_paths/retryable_session.py | 89 ++ .../backend/api/attack_paths/views_helpers.py | 143 ++ api/src/backend/api/filters.py | 18 + .../dev/8_dev_attack_paths_scans.json | 41 + .../api/migrations/0070_attack_paths_scan.py | 154 ++ api/src/backend/api/models.py | 95 ++ api/src/backend/api/specs/v1.yaml | 796 ++++++++++ .../backend/api/tests/test_attack_paths.py | 172 +++ api/src/backend/api/tests/test_views.py | 438 +++++- api/src/backend/api/v1/serializers.py | 104 ++ api/src/backend/api/v1/urls.py | 4 + api/src/backend/api/v1/views.py | 242 ++- api/src/backend/config/celery.py | 1 + api/src/backend/config/django/devel.py | 6 + api/src/backend/config/django/production.py | 6 + api/src/backend/conftest.py | 128 +- api/src/backend/tasks/beat.py | 7 + .../tasks/jobs/attack_paths/__init__.py | 7 + .../backend/tasks/jobs/attack_paths/aws.py | 237 +++ .../tasks/jobs/attack_paths/db_utils.py | 161 ++ .../tasks/jobs/attack_paths/providers.py | 23 + .../tasks/jobs/attack_paths/prowler.py | 205 +++ 
.../backend/tasks/jobs/attack_paths/scan.py | 183 +++ .../backend/tasks/jobs/attack_paths/utils.py | 10 + api/src/backend/tasks/jobs/deletion.py | 26 +- api/src/backend/tasks/tasks.py | 53 +- .../tasks/tests/test_attack_paths_scan.py | 416 ++++++ .../backend/tasks/tests/test_connection.py | 2 +- api/src/backend/tasks/tests/test_deletion.py | 128 +- api/src/backend/tasks/tests/test_tasks.py | 85 +- docker-compose-dev.yml | 47 +- docker-compose.yml | 33 + skills/prowler-ci/SKILL.md | 26 +- skills/prowler-test-api/SKILL.md | 22 + ui/.husky/pre-commit | 13 +- ui/CHANGELOG.md | 2 + ui/actions/attack-paths/index.ts | 4 + ui/actions/attack-paths/queries.adapter.ts | 55 + ui/actions/attack-paths/queries.ts | 97 ++ .../attack-paths/query-result.adapter.ts | 164 ++ ui/actions/attack-paths/scans.adapter.ts | 89 ++ ui/actions/attack-paths/scans.ts | 69 + .../(workflow)/_components/index.ts | 2 + .../(workflow)/_components/vertical-steps.tsx | 299 ++++ .../_components/workflow-attack-paths.tsx | 49 + .../attack-paths/(workflow)/layout.tsx | 21 + .../_components/execute-button.tsx | 34 + .../_components/graph/attack-path-graph.tsx | 1168 +++++++++++++++ .../_components/graph/graph-controls.tsx | 93 ++ .../_components/graph/graph-legend.tsx | 508 +++++++ .../_components/graph/graph-loading.tsx | 24 + .../query-builder/_components/graph/index.ts | 5 + .../query-builder/_components/index.ts | 7 + .../_components/node-detail/index.ts | 4 + .../node-detail/node-detail-panel.tsx | 132 ++ .../_components/node-detail/node-findings.tsx | 102 ++ .../_components/node-detail/node-overview.tsx | 109 ++ .../node-detail/node-relationships.tsx | 105 ++ .../node-detail/node-remediation.tsx | 83 ++ .../node-detail/node-resources.tsx | 85 ++ .../_components/query-parameters-form.tsx | 122 ++ .../_components/query-selector.tsx | 46 + .../_components/scan-list-table.tsx | 350 +++++ .../_components/scan-status-badge.tsx | 59 + .../(workflow)/query-builder/_hooks/index.ts | 3 + .../query-builder/_hooks/use-graph-state.ts | 182 +++ .../query-builder/_hooks/use-query-builder.ts | 98 ++ .../query-builder/_hooks/use-wizard-state.ts | 91 ++ .../(workflow)/query-builder/_lib/export.ts | 145 ++ .../(workflow)/query-builder/_lib/format.ts | 25 + .../query-builder/_lib/graph-colors.ts | 139 ++ .../query-builder/_lib/graph-utils.ts | 187 +++ .../(workflow)/query-builder/_lib/index.ts | 22 + .../(workflow)/query-builder/page.tsx | 626 ++++++++ ui/app/(prowler)/attack-paths/page.tsx | 9 + ui/app/(prowler)/findings/page.tsx | 85 +- .../findings/finding-details-sheet.tsx | 46 + ui/components/findings/index.ts | 1 + .../findings/table/column-findings.tsx | 19 +- .../findings/table/finding-detail.tsx | 20 +- ui/components/scans/auto-refresh.tsx | 14 +- .../ui/breadcrumbs/breadcrumb-navigation.tsx | 4 + ui/components/ui/sidebar/menu-item.tsx | 24 +- ui/components/ui/sidebar/menu.tsx | 1 + ui/components/ui/table/status-badge.tsx | 5 +- ui/dependency-log.json | 20 +- ui/lib/menu-list.ts | 14 + ui/package.json | 2 + ui/pnpm-lock.yaml | 26 + ui/styles/globals.css | 3 + ui/types/attack-paths.ts | 245 +++ ui/types/components.ts | 1 + 104 files changed, 11792 insertions(+), 322 deletions(-) create mode 100644 api/src/backend/api/attack_paths/__init__.py create mode 100644 api/src/backend/api/attack_paths/database.py create mode 100644 api/src/backend/api/attack_paths/query_definitions.py create mode 100644 api/src/backend/api/attack_paths/retryable_session.py create mode 100644 api/src/backend/api/attack_paths/views_helpers.py create mode 100644 
api/src/backend/api/fixtures/dev/8_dev_attack_paths_scans.json create mode 100644 api/src/backend/api/migrations/0070_attack_paths_scan.py create mode 100644 api/src/backend/api/tests/test_attack_paths.py create mode 100644 api/src/backend/tasks/jobs/attack_paths/__init__.py create mode 100644 api/src/backend/tasks/jobs/attack_paths/aws.py create mode 100644 api/src/backend/tasks/jobs/attack_paths/db_utils.py create mode 100644 api/src/backend/tasks/jobs/attack_paths/providers.py create mode 100644 api/src/backend/tasks/jobs/attack_paths/prowler.py create mode 100644 api/src/backend/tasks/jobs/attack_paths/scan.py create mode 100644 api/src/backend/tasks/jobs/attack_paths/utils.py create mode 100644 api/src/backend/tasks/tests/test_attack_paths_scan.py create mode 100644 ui/actions/attack-paths/index.ts create mode 100644 ui/actions/attack-paths/queries.adapter.ts create mode 100644 ui/actions/attack-paths/queries.ts create mode 100644 ui/actions/attack-paths/query-result.adapter.ts create mode 100644 ui/actions/attack-paths/scans.adapter.ts create mode 100644 ui/actions/attack-paths/scans.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/_components/index.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/_components/vertical-steps.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/_components/workflow-attack-paths.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/layout.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/execute-button.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/attack-path-graph.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-controls.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-legend.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-loading.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/index.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/index.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/index.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-detail-panel.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-findings.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-overview.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-relationships.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-remediation.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-resources.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-parameters-form.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-selector.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-list-table.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-status-badge.tsx create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/index.ts create mode 100644 
ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-graph-state.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-query-builder.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-wizard-state.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/export.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/format.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-colors.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-utils.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/index.ts create mode 100644 ui/app/(prowler)/attack-paths/(workflow)/query-builder/page.tsx create mode 100644 ui/app/(prowler)/attack-paths/page.tsx create mode 100644 ui/components/findings/finding-details-sheet.tsx create mode 100644 ui/types/attack-paths.ts diff --git a/.env b/.env index 0e3a9d5610..ac2af1631f 100644 --- a/.env +++ b/.env @@ -48,6 +48,26 @@ POSTGRES_DB=prowler_db # POSTGRES_REPLICA_MAX_ATTEMPTS=3 # POSTGRES_REPLICA_RETRY_BASE_DELAY=0.5 +# Neo4j auth +NEO4J_HOST=neo4j +NEO4J_PORT=7687 +NEO4J_USER=neo4j +NEO4J_PASSWORD=neo4j_password +# Neo4j settings +NEO4J_DBMS_MAX__DATABASES=1000000 +NEO4J_SERVER_MEMORY_PAGECACHE_SIZE=1G +NEO4J_SERVER_MEMORY_HEAP_INITIAL__SIZE=1G +NEO4J_SERVER_MEMORY_HEAP_MAX__SIZE=1G +NEO4J_APOC_EXPORT_FILE_ENABLED=true +NEO4J_APOC_IMPORT_FILE_ENABLED=true +NEO4J_APOC_IMPORT_FILE_USE_NEO4J_CONFIG=true +NEO4J_PLUGINS=["apoc"] +NEO4J_DBMS_SECURITY_PROCEDURES_ALLOWLIST=apoc.* +NEO4J_DBMS_SECURITY_PROCEDURES_UNRESTRICTED=apoc.* +NEO4J_DBMS_CONNECTOR_BOLT_LISTEN_ADDRESS=0.0.0.0:7687 +# Neo4j Prowler settings +NEO4J_INSERT_BATCH_SIZE=500 + # Celery-Prowler task settings TASK_RETRY_DELAY_SECONDS=0.1 TASK_RETRY_ATTEMPTS=5 @@ -117,7 +137,6 @@ SENTRY_ENVIRONMENT=local SENTRY_RELEASE=local NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT} - #### Prowler release version #### NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.16.0 diff --git a/.github/actions/setup-python-poetry/action.yml b/.github/actions/setup-python-poetry/action.yml index 790f3ef0e6..cb17c050a2 100644 --- a/.github/actions/setup-python-poetry/action.yml +++ b/.github/actions/setup-python-poetry/action.yml @@ -29,7 +29,7 @@ runs: run: | BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}" echo "Using branch: $BRANCH_NAME" - sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml + sed -i "s|\(git+https://github.com/prowler-cloud/prowler[^@]*\)@master|\1@$BRANCH_NAME|g" pyproject.toml - name: Install poetry shell: bash diff --git a/.github/workflows/api-security.yml b/.github/workflows/api-security.yml index 285262ce7f..ed1aa2cc58 100644 --- a/.github/workflows/api-security.yml +++ b/.github/workflows/api-security.yml @@ -61,7 +61,9 @@ jobs: - name: Safety if: steps.check-changes.outputs.any_changed == 'true' - run: poetry run safety check + run: poetry run safety check --ignore 79023,79027,82754 + # TODO: 79023 & 79027 knack ReDoS until `azure-cli-core` (via `cartography`) allows `knack` >=0.13.0 + # TODO: 82754 filelock TOCTOU symlink vulnerability - transitive dep, awaiting upstream fix - name: Vulture if: steps.check-changes.outputs.any_changed == 'true' diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9cd10afb0d..0a4164bc43 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -120,7 +120,8 @@ repos: name: safety description: "Safety is a tool that
checks your installed dependencies for known security vulnerabilities" # TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X - entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745' + # TODO: 79023 & 79027 knack ReDoS until `azure-cli-core` (via `cartography`) allows `knack` >=0.13.0 + entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745,79023,79027' language: system - id: vulture diff --git a/README.md b/README.md index bdf08ee20a..928ebc0655 100644 --- a/README.md +++ b/README.md @@ -80,6 +80,23 @@ prowler dashboard ``` ![Prowler Dashboard](docs/images/products/dashboard.png) + +## Attack Paths + +Attack Paths automatically extends every completed AWS scan with a Neo4j graph that combines Cartography's cloud inventory with Prowler findings. The feature runs in the API worker after each scan and therefore requires: + +- An accessible Neo4j instance (the Docker Compose files already ship a `neo4j` service). +- The following environment variables so Django and Celery can connect: + + | Variable | Description | Default | + | --- | --- | --- | + | `NEO4J_HOST` | Hostname used by the API containers. | `neo4j` | + | `NEO4J_PORT` | Bolt port exposed by Neo4j. | `7687` | + | `NEO4J_USER` / `NEO4J_PASSWORD` | Credentials with rights to create per-tenant databases. | `neo4j` / `neo4j_password` | + +Every AWS provider scan will enqueue an Attack Paths ingestion job automatically. Other cloud providers will be added in future iterations. + + # Prowler at a Glance > [!Tip] > For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com). diff --git a/api/CHANGELOG.md b/api/CHANGELOG.md index 7373464dfd..4ad9fd92e2 100644 --- a/api/CHANGELOG.md +++ b/api/CHANGELOG.md @@ -10,6 +10,7 @@ All notable changes to the **Prowler API** are documented in this file. - `/api/v1/overviews/resource-groups` to retrieve an overview of the resource groups based on finding severities [(#9694)](https://github.com/prowler-cloud/prowler/pull/9694) - Endpoints `GET /findings` and `GET /findings/metadata/latest` now support the `group` filter [(#9694)](https://github.com/prowler-cloud/prowler/pull/9694) - `provider_id` and `provider_id__in` filter aliases for findings endpoints to enable consistent frontend parameter naming [(#9701)](https://github.com/prowler-cloud/prowler/pull/9701) +- Attack Paths scans for AWS providers with new Neo4j Docker service, new Celery task, new `AttackPathsScan` model and new endpoints under `/api/v1/attack-paths-scans` [(#9805)](https://github.com/prowler-cloud/prowler/pull/9805) --- diff --git a/api/poetry.lock b/api/poetry.lock index bde85eb28a..3e54af3d7c 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. [[package]] name = "about-time" @@ -12,6 +12,71 @@ files = [ {file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"}, ] +[[package]] +name = "adal" +version = "1.2.7" +description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy.
The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d"}, + {file = "adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"}, +] + +[package.dependencies] +cryptography = ">=1.1.0" +PyJWT = ">=1.0.0,<3" +python-dateutil = ">=2.1.0,<3" +requests = ">=2.0.0,<3" + +[[package]] +name = "aioboto3" +version = "15.5.0" +description = "Async boto3 wrapper" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aioboto3-15.5.0-py3-none-any.whl", hash = "sha256:cc880c4d6a8481dd7e05da89f41c384dbd841454fc1998ae25ca9c39201437a6"}, + {file = "aioboto3-15.5.0.tar.gz", hash = "sha256:ea8d8787d315594842fbfcf2c4dce3bac2ad61be275bc8584b2ce9a3402a6979"}, +] + +[package.dependencies] +aiobotocore = {version = "2.25.1", extras = ["boto3"]} +aiofiles = ">=23.2.1" + +[package.extras] +chalice = ["chalice (>=1.24.0)"] +s3cse = ["cryptography (>=44.0.1)"] + +[[package]] +name = "aiobotocore" +version = "2.25.1" +description = "Async client for aws services using botocore and aiohttp" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiobotocore-2.25.1-py3-none-any.whl", hash = "sha256:eb6daebe3cbef5b39a0bb2a97cffbe9c7cb46b2fcc399ad141f369f3c2134b1f"}, + {file = "aiobotocore-2.25.1.tar.gz", hash = "sha256:ea9be739bfd7ece8864f072ec99bb9ed5c7e78ebb2b0b15f29781fbe02daedbc"}, +] + +[package.dependencies] +aiohttp = ">=3.9.2,<4.0.0" +aioitertools = ">=0.5.1,<1.0.0" +boto3 = {version = ">=1.40.46,<1.40.62", optional = true, markers = "extra == \"boto3\""} +botocore = ">=1.40.46,<1.40.62" +jmespath = ">=0.7.1,<2.0.0" +multidict = ">=6.0.0,<7.0.0" +python-dateutil = ">=2.1,<3.0.0" +wrapt = ">=1.10.10,<2.0.0" + +[package.extras] +awscli = ["awscli (>=1.42.46,<1.42.62)"] +boto3 = ["boto3 (>=1.40.46,<1.40.62)"] +httpx = ["httpx (>=0.25.1,<0.29)"] + [[package]] name = "aiofiles" version = "24.1.0" @@ -178,6 +243,18 @@ yarl = ">=1.17.0,<2.0" [package.extras] speedups = ["Brotli (>=1.2) ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "backports.zstd ; platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "brotlicffi (>=1.2) ; platform_python_implementation != \"CPython\""] +[[package]] +name = "aioitertools" +version = "0.13.0" +description = "itertools and builtins for AsyncIO and mixed iterables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aioitertools-0.13.0-py3-none-any.whl", hash = "sha256:0be0292b856f08dfac90e31f4739432f4cb6d7520ab9eb73e143f4f2fa5259be"}, + {file = "aioitertools-0.13.0.tar.gz", hash = "sha256:620bd241acc0bbb9ec819f1ab215866871b4bbd1f73836a55f799200ee86950c"}, +] + [[package]] name = "aiosignal" version = "1.4.0" @@ -451,17 +528,18 @@ alibabacloud_credentials = ">=0.3.4" [[package]] name = "alibabacloud-openapi-util" -version = "0.2.2" +version = "0.2.4" description = "Aliyun Tea OpenApi Library for Python" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8"}, + {file = "alibabacloud_openapi_util-0.2.4-py3-none-any.whl", hash = 
"sha256:a2474f230b5965ae9a8c286e0dc86132a887928d02d20b8182656cf6b1b6c5bd"}, + {file = "alibabacloud_openapi_util-0.2.4.tar.gz", hash = "sha256:87022b9dcb7593a601f7a40ca698227ac3ccb776b58cb7b06b8dc7f510995c34"}, ] [package.dependencies] -alibabacloud_tea_util = ">=0.0.2" +alibabacloud-tea-util = ">=0.3.13,<1.0.0" cryptography = ">=3.0.0" [[package]] @@ -757,6 +835,18 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] trio = ["trio (>=0.26.1)"] +[[package]] +name = "applicationinsights" +version = "0.11.10" +description = "This project extends the Application Insights API surface to support Python." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "applicationinsights-0.11.10-py2.py3-none-any.whl", hash = "sha256:e89a890db1c6906b6a7d0bcfd617dac83974773c64573147c8d6654f9cf2a6ea"}, + {file = "applicationinsights-0.11.10.tar.gz", hash = "sha256:0b761f3ef0680acf4731906dfc1807faa6f2a57168ae74592db0084a6099f7b3"}, +] + [[package]] name = "apscheduler" version = "3.11.2" @@ -785,6 +875,21 @@ tornado = ["tornado (>=4.3)"] twisted = ["twisted"] zookeeper = ["kazoo"] +[[package]] +name = "argcomplete" +version = "3.5.3" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "argcomplete-3.5.3-py3-none-any.whl", hash = "sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61"}, + {file = "argcomplete-3.5.3.tar.gz", hash = "sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392"}, +] + +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + [[package]] name = "asgiref" version = "3.9.1" @@ -887,6 +992,58 @@ files = [ {file = "awsipranges-0.3.3.tar.gz", hash = "sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0"}, ] +[[package]] +name = "azure-cli-core" +version = "2.81.0" +description = "Microsoft Azure Command-Line Tools Core Module" +optional = false +python-versions = ">=3.10.0" +groups = ["main"] +files = [ + {file = "azure_cli_core-2.81.0-py3-none-any.whl", hash = "sha256:2c711ff890d003b10bb40f932d008a872c6451324ee533fbbb861c950c3f0f93"}, + {file = "azure_cli_core-2.81.0.tar.gz", hash = "sha256:bae30dcd6f8c1883c8e32fbcb7c43dbe2dbfe522e428c7629edef271933f7b1e"}, +] + +[package.dependencies] +argcomplete = ">=3.5.2,<3.6.0" +azure-cli-telemetry = "==1.1.0.*" +azure-core = ">=1.35.0,<1.36.0" +azure-mgmt-core = ">=1.2.0,<2" +cryptography = "*" +distro = {version = "*", markers = "sys_platform == \"linux\""} +humanfriendly = ">=10.0,<11.0" +jmespath = "*" +knack = ">=0.11.0,<0.12.0" +microsoft-security-utilities-secret-masker = ">=1.0.0b4,<1.1.0" +msal = [ + {version = "1.34.0b1", extras = ["broker"], markers = "sys_platform == \"win32\""}, + {version = "1.34.0b1", markers = "sys_platform != \"win32\""}, +] +msal-extensions = "1.2.0" +packaging = ">=20.9" +pkginfo = ">=1.5.0.1" +psutil = {version = ">=5.9", markers = "sys_platform != \"cygwin\""} +py-deviceid = "*" +PyJWT = ">=2.1.0" +pyopenssl = ">=17.1.0" +requests = {version = "*", extras = ["socks"]} + +[[package]] +name = "azure-cli-telemetry" +version = "1.1.0" +description = "Microsoft Azure CLI Telemetry Package" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "azure-cli-telemetry-1.1.0.tar.gz", hash = "sha256:d922379cda1b48952be75fb3bd2ac5e7ceecf569492a6088bab77894c624a278"}, + {file = "azure_cli_telemetry-1.1.0-py3-none-any.whl", hash = 
"sha256:2fc12608c0cf0ea6e69b392af9cab92f1249340b8caff7e9674cf91b3becb337"}, +] + +[package.dependencies] +applicationinsights = ">=0.11.1,<0.12" +portalocker = ">=1.6,<3" + [[package]] name = "azure-common" version = "1.1.28" @@ -1028,6 +1185,23 @@ azure-mgmt-core = ">=1.3.2" isodate = ">=0.6.1" typing-extensions = ">=4.6.0" +[[package]] +name = "azure-mgmt-containerinstance" +version = "10.1.0" +description = "Microsoft Azure Container Instance Client Library for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "azure-mgmt-containerinstance-10.1.0.zip", hash = "sha256:78d437adb28574f448c838ed5f01f9ced378196098061deb59d9f7031704c17e"}, + {file = "azure_mgmt_containerinstance-10.1.0-py3-none-any.whl", hash = "sha256:ee7977b7b70f2233e44ec6ce8c99027f3f7892bb3452b4bad46df340d9f98959"}, +] + +[package.dependencies] +azure-common = ">=1.1,<2.0" +azure-mgmt-core = ">=1.3.2,<2.0.0" +isodate = ">=0.6.1,<1.0.0" + [[package]] name = "azure-mgmt-containerregistry" version = "12.0.0" @@ -1114,6 +1288,42 @@ azure-common = ">=1.1,<2.0" azure-mgmt-core = ">=1.3.2,<2.0.0" isodate = ">=0.6.1,<1.0.0" +[[package]] +name = "azure-mgmt-datafactory" +version = "9.2.0" +description = "Microsoft Azure Data Factory Management Client Library for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "azure_mgmt_datafactory-9.2.0-py3-none-any.whl", hash = "sha256:d870a7a6099227e91d1c258a956c2aa32c2ea4c0a4409913d8f215887349f128"}, + {file = "azure_mgmt_datafactory-9.2.0.tar.gz", hash = "sha256:5132e9c24c441ac225f2a60225924baa55079ca81eff7db99a70d661d64bb0d7"}, +] + +[package.dependencies] +azure-common = ">=1.1" +azure-mgmt-core = ">=1.3.2" +isodate = ">=0.6.1" +typing-extensions = ">=4.6.0" + +[[package]] +name = "azure-mgmt-eventgrid" +version = "10.4.0" +description = "Microsoft Azure Event Grid Management Client Library for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "azure_mgmt_eventgrid-10.4.0-py3-none-any.whl", hash = "sha256:5e4637245bbff33298d5f427971b870dbb03d873a3ef68f328190a7b7a38c56f"}, + {file = "azure_mgmt_eventgrid-10.4.0.tar.gz", hash = "sha256:303e5e27cf4bb5ec833ba4e5a9ef70b5bc410e190412ec47cde59d82e413fb7e"}, +] + +[package.dependencies] +azure-common = ">=1.1" +azure-mgmt-core = ">=1.3.2" +isodate = ">=0.6.1" +typing-extensions = ">=4.6.0" + [[package]] name = "azure-mgmt-keyvault" version = "10.3.1" @@ -1149,6 +1359,23 @@ azure-common = ">=1.1,<2.0" azure-mgmt-core = ">=1.2.0,<2.0.0" msrest = ">=0.6.21" +[[package]] +name = "azure-mgmt-logic" +version = "10.0.0" +description = "Microsoft Azure Logic Apps Management Client Library for Python" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "azure-mgmt-logic-10.0.0.zip", hash = "sha256:b3fa4864f14aaa7af41d778d925f051ed29b6016f46344765ecd0f49d0f04dd6"}, + {file = "azure_mgmt_logic-10.0.0-py3-none-any.whl", hash = "sha256:525c78afedf3edb35eb0a16152c8beba89769ee1bc6af01bcdc42842a551e443"}, +] + +[package.dependencies] +azure-common = ">=1.1,<2.0" +azure-mgmt-core = ">=1.3.0,<2.0.0" +msrest = ">=0.6.21" + [[package]] name = "azure-mgmt-monitor" version = "6.0.2" @@ -1415,6 +1642,18 @@ typing-extensions = ">=4.6.0" [package.extras] aio = ["azure-core[aio] (>=1.30.0)"] +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = 
"backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + [[package]] name = "bandit" version = "1.7.9" @@ -1466,34 +1705,34 @@ files = [ [[package]] name = "boto3" -version = "1.39.15" +version = "1.40.61" description = "The AWS SDK for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "boto3-1.39.15-py3-none-any.whl", hash = "sha256:38fc54576b925af0075636752de9974e172c8a2cf7133400e3e09b150d20fb6a"}, - {file = "boto3-1.39.15.tar.gz", hash = "sha256:b4483625f0d8c35045254dee46cd3c851bbc0450814f20b9b25bee1b5c0d8409"}, + {file = "boto3-1.40.61-py3-none-any.whl", hash = "sha256:6b9c57b2a922b5d8c17766e29ed792586a818098efe84def27c8f582b33f898c"}, + {file = "boto3-1.40.61.tar.gz", hash = "sha256:d6c56277251adf6c2bdd25249feae625abe4966831676689ff23b4694dea5b12"}, ] [package.dependencies] -botocore = ">=1.39.15,<1.40.0" +botocore = ">=1.40.61,<1.41.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.13.0,<0.14.0" +s3transfer = ">=0.14.0,<0.15.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.39.15" +version = "1.40.61" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "botocore-1.39.15-py3-none-any.whl", hash = "sha256:eb9cfe918ebfbfb8654e1b153b29f0c129d586d2c0d7fb4032731d49baf04cff"}, - {file = "botocore-1.39.15.tar.gz", hash = "sha256:2aa29a717f14f8c7ca058c2e297aaed0aa10ecea24b91514eee802814d1b7600"}, + {file = "botocore-1.40.61-py3-none-any.whl", hash = "sha256:17ebae412692fd4824f99cde0f08d50126dc97954008e5ba2b522eb049238aa7"}, + {file = "botocore-1.40.61.tar.gz", hash = "sha256:a2487ad69b090f9cccd64cf07c7021cd80ee9c0655ad974f87045b02f3ef52cd"}, ] [package.dependencies] @@ -1502,7 +1741,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.23.8)"] +crt = ["awscrt (==0.27.6)"] [[package]] name = "cachetools" @@ -1516,6 +1755,75 @@ files = [ {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] +[[package]] +name = "cartography" +version = "0.0.1.dev1268+gc134846c0" +description = "Explore assets and their relationships across your technical infrastructure." 
+optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [] +develop = false + +[package.dependencies] +adal = ">=1.2.4" +aioboto3 = ">=13.0.0" +azure-cli-core = ">=2.26.0" +azure-identity = ">=1.5.0" +azure-mgmt-authorization = ">=0.60.0" +azure-mgmt-compute = ">=5.0.0" +azure-mgmt-containerinstance = ">=10.0.0" +azure-mgmt-containerservice = ">=30.0.0" +azure-mgmt-cosmosdb = ">=6.0.0" +azure-mgmt-datafactory = ">=8.0.0" +azure-mgmt-eventgrid = ">=10.0.0" +azure-mgmt-logic = ">=10.0.0" +azure-mgmt-monitor = ">=3.0.0" +azure-mgmt-network = ">=25.0.0" +azure-mgmt-resource = ">=10.2.0" +azure-mgmt-security = ">=5.0.0" +azure-mgmt-sql = ">=3.0.1,<4" +azure-mgmt-storage = ">=16.0.0" +azure-mgmt-web = ">=7.0.0" +backoff = ">=2.1.2" +boto3 = ">=1.15.1" +botocore = ">=1.18.1" +cloudflare = ">=4.1.0,<5.0.0" +crowdstrike-falconpy = ">=0.5.1" +dnspython = ">=1.15.0" +duo-client = "*" +google-api-python-client = ">=1.7.8" +google-auth = ">=2.37.0" +google-cloud-asset = ">=1.0.0" +google-cloud-resource-manager = ">=1.14.2" +httpx = ">=0.24.0" +kubernetes = ">=22.6.0" +marshmallow = ">=3.0.0rc7" +msgraph-sdk = "*" +msrestazure = ">=0.6.4" +neo4j = ">=5.28.2,<6.0.0" +oci = ">=2.71.0" +okta = "<1.0.0" +packaging = "*" +pdpyras = ">=4.3.0" +policyuniverse = ">=1.1.0.0" +python-dateutil = "*" +python-digitalocean = ">=1.16.0" +pyyaml = ">=5.3.1" +requests = ">=2.22.0" +scaleway = ">=2.10.0" +slack-sdk = ">=3.37.0" +statsd = "*" +typer = ">=0.9.0" +types-aiobotocore-ecr = "*" +xmltodict = "*" + +[package.source] +type = "git" +url = "https://github.com/prowler-cloud/cartography" +reference = "master" +resolved_reference = "c134846c0db64747340f880cf0b5085f5e473e03" + [[package]] name = "celery" version = "5.4.0" @@ -1852,6 +2160,26 @@ prompt-toolkit = ">=3.0.36" [package.extras] testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] +[[package]] +name = "cloudflare" +version = "4.3.1" +description = "The official Python library for the cloudflare API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cloudflare-4.3.1-py3-none-any.whl", hash = "sha256:6927135a5ee5633d6e2e1952ca0484745e933727aeeb189996d2ad9d292071c6"}, + {file = "cloudflare-4.3.1.tar.gz", hash = "sha256:b1e1c6beeb8d98f63bfe0a1cba874fc4e22e000bcc490544f956c689b3b5b258"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.10,<5" + [[package]] name = "colorama" version = "0.4.6" @@ -1863,7 +2191,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} +markers = {dev = "sys_platform == \"win32\" or platform_system == \"Windows\""} [[package]] name = "contextlib2" @@ -2049,6 +2377,25 @@ files = [ [package.extras] dev = ["polib"] +[[package]] +name = "crowdstrike-falconpy" +version = "1.5.4" +description = "The CrowdStrike Falcon SDK for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "crowdstrike_falconpy-1.5.4-py3-none-any.whl", hash = "sha256:f698d4571d0dfac1dc450a69329959e1827d49a92ce9f81d2f9963f6da41b670"}, + {file = "crowdstrike_falconpy-1.5.4.tar.gz", hash = "sha256:b357850664639af5e8441d1ed5efb905b03af74c7c5f49b4ade161202e8e45c7"}, +] + 
+[package.dependencies] +requests = "*" +urllib3 = "*" + +[package.extras] +dev = ["bandit", "coverage", "flake8", "pydocstyle", "pylint", "pytest", "pytest-cov"] + [[package]] name = "cryptography" version = "44.0.1" @@ -2781,6 +3128,21 @@ merge = ["merge3"] paramiko = ["paramiko"] pgp = ["gpg"] +[[package]] +name = "duo-client" +version = "5.5.0" +description = "Reference client for Duo Security APIs" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "duo_client-5.5.0-py3-none-any.whl", hash = "sha256:4fbf1e97a2b25ef64e9f88171ab817162cf45bafc1c63026af4883baf8892a12"}, + {file = "duo_client-5.5.0.tar.gz", hash = "sha256:303109e047fe7525ba4fc4a294c1f3deb4125066e89c10d33f7430378867b1d6"}, +] + +[package.dependencies] +setuptools = "*" + [[package]] name = "durationpy" version = "0.10" @@ -3148,6 +3510,8 @@ files = [ [package.dependencies] google-auth = ">=2.14.1,<3.0.0" googleapis-common-protos = ">=1.56.2,<2.0.0" +grpcio = {version = ">=1.49.1,<2.0.0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} +grpcio-status = {version = ">=1.49.1,<2.0.0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} proto-plus = ">=1.22.3,<2.0.0" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" requests = ">=2.18.0,<3.0.0" @@ -3220,6 +3584,103 @@ files = [ google-auth = "*" httplib2 = ">=0.19.0" +[[package]] +name = "google-cloud-access-context-manager" +version = "0.3.0" +description = "Google Cloud Access Context Manager Protobufs" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_cloud_access_context_manager-0.3.0-py3-none-any.whl", hash = "sha256:5d15ad51547f06c281e35f16b4ffcb3e98bb2d898b01470f88b94edfb2eeb0a3"}, + {file = "google_cloud_access_context_manager-0.3.0.tar.gz", hash = "sha256:f3aa35c9225b7aaef85ecdacedcc1577789be8d458b7a41b6ad23b504786e5f9"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[[package]] +name = "google-cloud-asset" +version = "4.1.0" +description = "Google Cloud Asset API client library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_cloud_asset-4.1.0-py3-none-any.whl", hash = "sha256:2ce567bf002ad5099e173f6106393a7abc4782598c5d0d27de7d86ac26736e06"}, + {file = "google_cloud_asset-4.1.0.tar.gz", hash = "sha256:00ba110085ff9f284b49961bcb9d2da5b5863fb91643c16d173ed38d73bfe35c"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +google-cloud-access-context-manager = ">=0.1.2,<1.0.0" +google-cloud-org-policy = ">=0.1.2,<2.0.0" +google-cloud-os-config = ">=1.0.0,<2.0.0" +grpc-google-iam-v1 = ">=0.14.0,<1.0.0" +grpcio = ">=1.33.2,<2.0.0" +proto-plus = ">=1.22.3,<2.0.0" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[[package]] +name = "google-cloud-org-policy" +version = "1.15.0" +description = "Google Cloud Org Policy API client library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = 
"google_cloud_org_policy-1.15.0-py3-none-any.whl", hash = "sha256:5da410288236b334b8d05010501ea6180c5dc9e30888ff09488f2f107632f35b"}, + {file = "google_cloud_org_policy-1.15.0.tar.gz", hash = "sha256:271d16a10e75347eace60d02cde322b2b1b613bcc99917109e0ebf2a4102253a"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +grpcio = ">=1.33.2,<2.0.0" +proto-plus = ">=1.22.3,<2.0.0" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[[package]] +name = "google-cloud-os-config" +version = "1.22.0" +description = "Google Cloud Os Config API client library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_cloud_os_config-1.22.0-py3-none-any.whl", hash = "sha256:92afa402aa3b94d765751907fded1ef5908a6a5322f1fc88dee9e4c7f1cd7e54"}, + {file = "google_cloud_os_config-1.22.0.tar.gz", hash = "sha256:d79a310f6fa1ce7470aaa084c70e38dc05d98531f468f821b3a526e4d33a70e4"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +grpcio = ">=1.33.2,<2.0.0" +proto-plus = ">=1.22.3,<2.0.0" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[[package]] +name = "google-cloud-resource-manager" +version = "1.15.0" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_cloud_resource_manager-1.15.0-py3-none-any.whl", hash = "sha256:0ccde5db644b269ddfdf7b407a2c7b60bdbf459f8e666344a5285601d00c7f6d"}, + {file = "google_cloud_resource_manager-1.15.0.tar.gz", hash = "sha256:3d0b78c3daa713f956d24e525b35e9e9a76d597c438837171304d431084cedaf"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +grpc-google-iam-v1 = ">=0.14.0,<1.0.0" +grpcio = ">=1.33.2,<2.0.0" +proto-plus = ">=1.22.3,<2.0.0" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + [[package]] name = "googleapis-common-protos" version = "1.70.0" @@ -3233,6 +3694,7 @@ files = [ ] [package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0", optional = true, markers = "extra == \"grpc\""} protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" [package.extras] @@ -3347,6 +3809,117 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil", "setuptools"] +[[package]] +name = "grpc-google-iam-v1" +version = "0.14.3" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "grpc_google_iam_v1-0.14.3-py3-none-any.whl", hash = "sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6"}, + {file = "grpc_google_iam_v1-0.14.3.tar.gz", hash = "sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0" +protobuf = ">=3.20.2,<4.21.1 || 
>4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[[package]] +name = "grpcio" +version = "1.76.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.76.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:65a20de41e85648e00305c1bb09a3598f840422e522277641145a32d42dcefcc"}, + {file = "grpcio-1.76.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:40ad3afe81676fd9ec6d9d406eda00933f218038433980aa19d401490e46ecde"}, + {file = "grpcio-1.76.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:035d90bc79eaa4bed83f524331d55e35820725c9fbb00ffa1904d5550ed7ede3"}, + {file = "grpcio-1.76.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4215d3a102bd95e2e11b5395c78562967959824156af11fa93d18fdd18050990"}, + {file = "grpcio-1.76.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:49ce47231818806067aea3324d4bf13825b658ad662d3b25fada0bdad9b8a6af"}, + {file = "grpcio-1.76.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8cc3309d8e08fd79089e13ed4819d0af72aa935dd8f435a195fd152796752ff2"}, + {file = "grpcio-1.76.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:971fd5a1d6e62e00d945423a567e42eb1fa678ba89072832185ca836a94daaa6"}, + {file = "grpcio-1.76.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9d9adda641db7207e800a7f089068f6f645959f2df27e870ee81d44701dd9db3"}, + {file = "grpcio-1.76.0-cp310-cp310-win32.whl", hash = "sha256:063065249d9e7e0782d03d2bca50787f53bd0fb89a67de9a7b521c4a01f1989b"}, + {file = "grpcio-1.76.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6ae758eb08088d36812dd5d9af7a9859c05b1e0f714470ea243694b49278e7b"}, + {file = "grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a"}, + {file = "grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c"}, + {file = "grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465"}, + {file = "grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48"}, + {file = "grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da"}, + {file = "grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397"}, + {file = "grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749"}, + {file = "grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00"}, + {file = "grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054"}, + {file = "grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d"}, + {file = "grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8"}, + {file = "grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280"}, + 
{file = "grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4"}, + {file = "grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11"}, + {file = "grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6"}, + {file = "grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8"}, + {file = "grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980"}, + {file = "grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882"}, + {file = "grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958"}, + {file = "grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347"}, + {file = "grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2"}, + {file = "grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468"}, + {file = "grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3"}, + {file = "grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb"}, + {file = "grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae"}, + {file = "grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77"}, + {file = "grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03"}, + {file = "grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42"}, + {file = "grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f"}, + {file = "grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8"}, + {file = "grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62"}, + {file = "grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd"}, + {file = "grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc"}, + {file = "grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a"}, + {file = "grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba"}, + {file = 
"grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09"}, + {file = "grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc"}, + {file = "grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc"}, + {file = "grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e"}, + {file = "grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e"}, + {file = "grpcio-1.76.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:8ebe63ee5f8fa4296b1b8cfc743f870d10e902ca18afc65c68cf46fd39bb0783"}, + {file = "grpcio-1.76.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:3bf0f392c0b806905ed174dcd8bdd5e418a40d5567a05615a030a5aeddea692d"}, + {file = "grpcio-1.76.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b7604868b38c1bfd5cf72d768aedd7db41d78cb6a4a18585e33fb0f9f2363fd"}, + {file = "grpcio-1.76.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e6d1db20594d9daba22f90da738b1a0441a7427552cc6e2e3d1297aeddc00378"}, + {file = "grpcio-1.76.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d099566accf23d21037f18a2a63d323075bebace807742e4b0ac210971d4dd70"}, + {file = "grpcio-1.76.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebea5cc3aa8ea72e04df9913492f9a96d9348db876f9dda3ad729cfedf7ac416"}, + {file = "grpcio-1.76.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0c37db8606c258e2ee0c56b78c62fc9dee0e901b5dbdcf816c2dd4ad652b8b0c"}, + {file = "grpcio-1.76.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ebebf83299b0cb1721a8859ea98f3a77811e35dce7609c5c963b9ad90728f886"}, + {file = "grpcio-1.76.0-cp39-cp39-win32.whl", hash = "sha256:0aaa82d0813fd4c8e589fac9b65d7dd88702555f702fb10417f96e2a2a6d4c0f"}, + {file = "grpcio-1.76.0-cp39-cp39-win_amd64.whl", hash = "sha256:acab0277c40eff7143c2323190ea57b9ee5fd353d8190ee9652369fae735668a"}, + {file = "grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73"}, +] + +[package.dependencies] +typing-extensions = ">=4.12,<5.0" + +[package.extras] +protobuf = ["grpcio-tools (>=1.76.0)"] + +[[package]] +name = "grpcio-status" +version = "1.76.0" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio_status-1.76.0-py3-none-any.whl", hash = "sha256:380568794055a8efbbd8871162df92012e0228a5f6dffaf57f2a00c534103b18"}, + {file = "grpcio_status-1.76.0.tar.gz", hash = "sha256:25fcbfec74c15d1a1cb5da3fab8ee9672852dc16a5a9eeb5baf7d7a9952943cd"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.76.0" +protobuf = ">=6.31.1,<7.0.0" + [[package]] name = "gunicorn" version = "23.0.0" @@ -3472,6 +4045,21 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = 
"humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + [[package]] name = "hyperframe" version = "6.1.0" @@ -3715,6 +4303,25 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "jsonpickle" +version = "4.1.1" +description = "jsonpickle encodes/decodes any Python object to/from JSON" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jsonpickle-4.1.1-py3-none-any.whl", hash = "sha256:bb141da6057898aa2438ff268362b126826c812a1721e31cf08a6e142910dc91"}, + {file = "jsonpickle-4.1.1.tar.gz", hash = "sha256:f86e18f13e2b96c1c1eede0b7b90095bbb61d99fedc14813c44dc2f361dbbae1"}, +] + +[package.extras] +cov = ["pytest-cov"] +dev = ["black", "pyupgrade"] +docs = ["furo", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +packaging = ["build", "setuptools (>=61.2)", "setuptools_scm[toml] (>=6.0)", "twine"] +testing = ["PyYAML", "atheris (>=2.3.0,<2.4.0) ; python_version < \"3.12\"", "bson", "ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=6.0,!=8.1.*)", "pytest-benchmark", "pytest-benchmark[histogram]", "pytest-checkdocs (>=1.2.3)", "pytest-enabler (>=1.0.1)", "pytest-ruff (>=0.2.1)", "scikit-learn", "scipy (>=1.9.3) ; python_version > \"3.10\"", "scipy ; python_version <= \"3.10\"", "simplejson", "sqlalchemy", "ujson"] + [[package]] name = "jsonschema" version = "4.23.0" @@ -3863,6 +4470,26 @@ files = [ {file = "kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d"}, ] +[[package]] +name = "knack" +version = "0.11.0" +description = "A Command-Line Interface framework" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "knack-0.11.0-py3-none-any.whl", hash = "sha256:6704c867840978a119a193914a90e2e98c7be7dff764c8fcd8a2286c5a978d00"}, + {file = "knack-0.11.0.tar.gz", hash = "sha256:eb6568001e9110b1b320941431c51033d104cc98cda2254a5c2b09ba569fd494"}, +] + +[package.dependencies] +argcomplete = "*" +jmespath = "*" +packaging = "*" +pygments = "*" +pyyaml = "*" +tabulate = "*" + [[package]] name = "kombu" version = "5.5.4" @@ -4176,7 +4803,7 @@ version = "4.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.10" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, @@ -4271,7 +4898,7 @@ version = "3.26.2" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.9" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73"}, {file = "marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57"}, @@ -4382,7 +5009,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -4502,21 +5129,38 @@ files = [ [package.dependencies] microsoft-kiota-abstractions = ">=1.9.2,<1.10.0" +[[package]] +name = "microsoft-security-utilities-secret-masker" +version = "1.0.0b4" +description = "A tool for detecting and masking secrets" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "microsoft_security_utilities_secret_masker-1.0.0b4-py3-none-any.whl", hash = "sha256:0429fcaad10fc8ae3f940ab84fd2926e4f50ede134162144123b35937be831a8"}, + {file = "microsoft_security_utilities_secret_masker-1.0.0b4.tar.gz", hash = "sha256:a30bd361ac18c8b52f6844076bc26465335949ea9c7a004d95f5196ec6fdef3e"}, +] + [[package]] name = "msal" -version = "1.33.0" +version = "1.34.0b1" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "msal-1.33.0-py3-none-any.whl", hash = "sha256:c0cd41cecf8eaed733ee7e3be9e040291eba53b0f262d3ae9c58f38b04244273"}, - {file = "msal-1.33.0.tar.gz", hash = "sha256:836ad80faa3e25a7d71015c990ce61f704a87328b1e73bcbb0623a18cbf17510"}, + {file = "msal-1.34.0b1-py3-none-any.whl", hash = "sha256:3b6373325e3509d97873e36965a75e9cc9393f1b579d12cc03c0ca0ef6d37eb4"}, + {file = "msal-1.34.0b1.tar.gz", hash = "sha256:86cdbfec14955e803379499d017056c6df4ed40f717fd6addde94bdeb4babd78"}, ] [package.dependencies] cryptography = ">=2.5,<48" PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +pymsalruntime = [ + {version = ">=0.14,<0.19", optional = true, markers = "python_version >= \"3.6\" and platform_system == \"Windows\" and extra == \"broker\""}, + {version = ">=0.17,<0.19", optional = true, markers = "python_version >= \"3.8\" and platform_system == \"Darwin\" and extra == \"broker\""}, + {version = ">=0.18,<0.19", optional = true, markers = "python_version >= \"3.8\" and platform_system == \"Linux\" and extra == \"broker\""}, +] requests = ">=2.0.0,<3" [package.extras] @@ -4524,21 +5168,19 @@ broker = ["pymsalruntime (>=0.14,<0.19) ; python_version >= \"3.6\" and platform [[package]] name = "msal-extensions" -version = "1.3.1" +version = "1.2.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
optional = false -python-versions = ">=3.9" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca"}, - {file = "msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4"}, + {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, + {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, ] [package.dependencies] msal = ">=1.29,<2" - -[package.extras] -portalocker = ["portalocker (>=1.4,<4)"] +portalocker = ">=1.4,<3" [[package]] name = "msgraph-core" @@ -4606,6 +5248,23 @@ requests-oauthlib = ">=0.5.0" [package.extras] async = ["aiodns ; python_version >= \"3.5\"", "aiohttp (>=3.0) ; python_version >= \"3.5\""] +[[package]] +name = "msrestazure" +version = "0.6.4.post1" +description = "AutoRest swagger generator Python client runtime. Azure-specific module." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "msrestazure-0.6.4.post1-py2.py3-none-any.whl", hash = "sha256:2264493b086c2a0a82ddf5fd87b35b3fffc443819127fed992ac5028354c151e"}, + {file = "msrestazure-0.6.4.post1.tar.gz", hash = "sha256:39842007569e8c77885ace5c46e4bf2a9108fcb09b1e6efdf85b6e2c642b55d4"}, +] + +[package.dependencies] +adal = ">=0.6.0,<2.0.0" +msrest = ">=0.6.0,<2.0.0" +six = "*" + [[package]] name = "multidict" version = "6.6.4" @@ -4810,6 +5469,26 @@ pyspark = ["pyspark (>=3.5.0)"] pyspark-connect = ["pyspark[connect] (>=3.5.0)"] sqlframe = ["sqlframe (>=3.22.0)"] +[[package]] +name = "neo4j" +version = "5.28.2" +description = "Neo4j Bolt driver for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "neo4j-5.28.2-py3-none-any.whl", hash = "sha256:5c53b5c3eee6dee7e920c9724391aa38d7135a651e71b766da00533b92a91a94"}, + {file = "neo4j-5.28.2.tar.gz", hash = "sha256:7d38e27e4f987a45cc9052500c6ee27325cb23dae6509037fe31dd7ddaed70c7"}, +] + +[package.dependencies] +pytz = "*" + +[package.extras] +numpy = ["numpy (>=1.7.0,<3.0.0)"] +pandas = ["numpy (>=1.7.0,<3.0.0)", "pandas (>=1.1.0,<3.0.0)"] +pyarrow = ["pyarrow (>=1.0.0)"] + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -4917,6 +5596,22 @@ pytz = ">=2016.10" [package.extras] adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""] +[[package]] +name = "okta" +version = "0.0.4" +description = "Okta client APIs" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "okta-0.0.4.tar.gz", hash = "sha256:53e792c68d3684ff4140b4cb1c02af3821090368f8110fde54c0bdb638449332"}, +] + +[package.dependencies] +python-dateutil = ">=2.4.2" +requests = ">=2.5.3" +six = ">=1.9.0" + [[package]] name = "openai" version = "1.101.0" @@ -5107,6 +5802,22 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "pdpyras" +version = "5.4.1" +description = "PagerDuty Python REST API Sessions." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "pdpyras-5.4.1-py2.py3-none-any.whl", hash = "sha256:e16020cf57e4c916ab3dace7c7dffe21a2e7059ab7411ce3ddf1e620c54e9c89"}, + {file = "pdpyras-5.4.1.tar.gz", hash = "sha256:36021aff5979a79f1d87edc95e0c46e98ce8549292bc0cab3d9f33501795703b"}, +] + +[package.dependencies] +requests = "*" +urllib3 = "*" + [[package]] name = "pillow" version = "11.3.0" @@ -5232,6 +5943,21 @@ tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "ole typing = ["typing-extensions ; python_version < \"3.10\""] xmp = ["defusedxml"] +[[package]] +name = "pkginfo" +version = "1.12.1.2" +description = "Query metadata from sdists / bdists / installed packages." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pkginfo-1.12.1.2-py3-none-any.whl", hash = "sha256:c783ac885519cab2c34927ccfa6bf64b5a704d7c69afaea583dd9b7afe969343"}, + {file = "pkginfo-1.12.1.2.tar.gz", hash = "sha256:5cd957824ac36f140260964eba3c6be6442a8359b8c48f4adf90210f33a04b7b"}, +] + +[package.extras] +testing = ["pytest", "pytest-cov", "wheel"] + [[package]] name = "platformdirs" version = "4.3.8" @@ -5289,6 +6015,42 @@ files = [ dev = ["pre-commit", "tox"] testing = ["coverage", "pytest", "pytest-benchmark"] +[[package]] +name = "policyuniverse" +version = "1.5.1.20231109" +description = "Parse and Process AWS IAM Policies, Statements, ARNs, and wildcards." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "policyuniverse-1.5.1.20231109-py2.py3-none-any.whl", hash = "sha256:0b0ece0ee8285af31fc39ce09c82a551ca62e62bc2842e23952503bccb973321"}, + {file = "policyuniverse-1.5.1.20231109.tar.gz", hash = "sha256:74e56d410560915c2c5132e361b0130e4bffe312a2f45230eac50d7c094bc40a"}, +] + +[package.extras] +dev = ["black", "pre-commit"] +tests = ["bandit", "coveralls", "pytest"] + +[[package]] +name = "portalocker" +version = "2.10.1" +description = "Wraps the portalocker recipe for easy usage" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + [[package]] name = "prompt-toolkit" version = "3.0.51" @@ -5502,8 +6264,9 @@ azure-mgmt-subscription = "3.1.1" azure-mgmt-web = "8.0.0" azure-monitor-query = "2.0.0" azure-storage-blob = "12.24.1" -boto3 = "1.39.15" -botocore = "1.39.15" +boto3 = "1.40.61" +botocore = "1.40.61" +cloudflare = "4.3.1" colorama = "0.4.6" cryptography = "44.0.1" dash = "3.1.1" @@ -5529,7 +6292,7 @@ python-dateutil = ">=2.9.0.post0,<3.0.0" pytz = "2025.1" schema = "0.7.5" shodan = "1.31.0" -slack-sdk = "3.34.0" +slack-sdk = "3.39.0" tabulate = "0.9.0" tzlocal = "5.3.1" @@ -5537,7 +6300,7 @@ tzlocal = "5.3.1" type = "git" url = "https://github.com/prowler-cloud/prowler.git" reference = "master" -resolved_reference = "d7f0b5b19094f92b460ee266935d2db4c4ad1de9" +resolved_reference = "2c4f866e42c25973d6eb3dc7d58e7ce72b1ed3f0" [[package]] name = "psutil" @@ -5651,6 +6414,18 @@ files = [ 
{file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] +[[package]] +name = "py-deviceid" +version = "0.1.1" +description = "A simple library to get or create a unique device id for a device in Python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "py_deviceid-0.1.1-py3-none-any.whl", hash = "sha256:c0e32815e87a08087a0811c18f4402ee88b28a321f997753d75ecdaab570321b"}, + {file = "py_deviceid-0.1.1.tar.gz", hash = "sha256:c3e7577ada23666e7f39e69370dfdaa76fe9de79c02635376d6aa0229bfa30e3"}, +] + [[package]] name = "py-iam-expand" version = "0.1.0" @@ -5742,7 +6517,7 @@ description = "PycURL -- A Python Interface To The cURL library" optional = false python-versions = ">=3.5" groups = ["main"] -markers = "sys_platform != \"win32\" and platform_python_implementation == \"CPython\"" +markers = "platform_python_implementation == \"CPython\" and sys_platform != \"win32\"" files = [ {file = "pycurl-7.45.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c31b390f1e2cd4525828f1bb78c1f825c0aab5d1588228ed71b22c4784bdb593"}, {file = "pycurl-7.45.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:942b352b69184cb26920db48e0c5cb95af39874b57dbe27318e60f1e68564e37"}, @@ -5937,7 +6712,7 @@ version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, @@ -5995,6 +6770,47 @@ tomlkit = ">=0.10.1" spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] +[[package]] +name = "pymsalruntime" +version = "0.18.1" +description = "The MSALRuntime Python Interop Package" +optional = false +python-versions = ">=3.6" +groups = ["main"] +markers = "(platform_system == \"Windows\" or platform_system == \"Darwin\" or platform_system == \"Linux\") and sys_platform == \"win32\"" +files = [ + {file = "pymsalruntime-0.18.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0c22e2e83faa10de422bbfaacc1bb2887c9025ee8a53f0fc2e4f7db01c4a7b66"}, + {file = "pymsalruntime-0.18.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8ce2944a0f944833d047bb121396091e00287e2b6373716106da86ea99abf379"}, + {file = "pymsalruntime-0.18.1-cp310-cp310-manylinux_2_35_x86_64.whl", hash = "sha256:9f7945ae0ee78357e9ca87d381f1c19763629a7197391ae7f84f4967a9f06e5b"}, + {file = "pymsalruntime-0.18.1-cp310-cp310-win32.whl", hash = "sha256:10020abdfc34bbbf3414b86359de551d2d8bc7c241bc38c59a2468c4d49f21d5"}, + {file = "pymsalruntime-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:f9aec2f44470d71feae35b611d1d8f15a549d96446e4f60e1ca1fb71856fffed"}, + {file = "pymsalruntime-0.18.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:e9320fb187fe1298d2165fa248af00907ca15d3a903a1d35fed86f6bc20b5880"}, + {file = "pymsalruntime-0.18.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9b2cecf3a570b7812d2007764df6dfbc27fca401a0d74532d5403aa20a9ef380"}, + {file = "pymsalruntime-0.18.1-cp311-cp311-manylinux_2_35_x86_64.whl", hash = "sha256:6f66fd99668abc3d4b8d93a9eb80c75178dc63186c79e6dbe133427b279835e0"}, + {file = "pymsalruntime-0.18.1-cp311-cp311-win32.whl", hash = "sha256:74416947b1071054f3258cac3448a7adf708888727bf283267df2bb27f0998f1"}, + {file = 
"pymsalruntime-0.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:beb926655aae3367b7e4bda2baad86f9271beefee1121f71642da0ed4de37fd2"}, + {file = "pymsalruntime-0.18.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a6c07651cf4e07690d1b022da0977f56820ef553ac6dcbf4c9e68e9611020997"}, + {file = "pymsalruntime-0.18.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:0b6c4f54ec13309cc7b717ac8760c2d9856d4924cefa2b794b6d03db4cfdeef8"}, + {file = "pymsalruntime-0.18.1-cp312-cp312-manylinux_2_35_x86_64.whl", hash = "sha256:06c73a47f024fcf36006b89fe32f2f6f6a004aa661cf8a03d3e496d1ef84cfe8"}, + {file = "pymsalruntime-0.18.1-cp312-cp312-win32.whl", hash = "sha256:ace12bf9b7fcbf1bf21a03c227717e09ba99acd9190623fe0821a08832ece4eb"}, + {file = "pymsalruntime-0.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:f9fd8ea52395f52f7d62498e47754adf2bfe6530816ff57eff1ba6f524aee51b"}, + {file = "pymsalruntime-0.18.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:047a98b6709cddf6a1f50f78ee16d06fea0f42a44971b6d3e2988537277a1a17"}, + {file = "pymsalruntime-0.18.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:910e653c65cd66fa9ce46dec103d3948da2276f7d4d315631a145eaab968d9a8"}, + {file = "pymsalruntime-0.18.1-cp313-cp313-manylinux_2_35_x86_64.whl", hash = "sha256:7ae0b160983ea0715d8ac69b441bbd29e7a9f31c9a5a2c350c79a794f5599f38"}, + {file = "pymsalruntime-0.18.1-cp313-cp313-win32.whl", hash = "sha256:adf4200a1b423fe5d8e984c142cc64f0b76a9b0f7f8ff767490a2dde94fa642b"}, + {file = "pymsalruntime-0.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:5a759aa551d084b160799f6df59c9891898ab305eb75ff1705bf04281675eb4b"}, + {file = "pymsalruntime-0.18.1-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:12b8990c4da1327ea46f6271bd57b28a90d3e795deacb370052914c3ff40d4c5"}, + {file = "pymsalruntime-0.18.1-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:8dd68f9fedc200950093378b30a2ade4517324cef060788a759b575ea58dc6b2"}, + {file = "pymsalruntime-0.18.1-cp38-cp38-manylinux_2_35_x86_64.whl", hash = "sha256:7183b1b1542a277db119fe55285c7609c661b8506b99cd7e53b7066ce6b838e4"}, + {file = "pymsalruntime-0.18.1-cp38-cp38-win32.whl", hash = "sha256:56c3d708ba86311f049b004de81aa97655fed82782d3ec67e14ae1e27d4f5e5b"}, + {file = "pymsalruntime-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:a8adc80fcf723b980976b81a0b409affe80f32d89ae6096d856fd20471d2f0c1"}, + {file = "pymsalruntime-0.18.1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:600d0f2b9b03dfb457ee1e13f191c2c217c0f6bceca512f1741e5215bc4bc5dc"}, + {file = "pymsalruntime-0.18.1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:daae8515ae8adac8662d8230f22af242f87c72d86f308ec51b7432f316199c1b"}, + {file = "pymsalruntime-0.18.1-cp39-cp39-manylinux_2_35_x86_64.whl", hash = "sha256:864b8b9555a180c6baf8a57df3976b2e511582d54099561fbfe73f9f0b95c9f5"}, + {file = "pymsalruntime-0.18.1-cp39-cp39-win32.whl", hash = "sha256:b90a3c8079ded9d5abc765bd90fdc34f6e49412793740ddbc6122a601008d50f"}, + {file = "pymsalruntime-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:852dc82b3eaad0cce2c583314705183bf216e7fa7178040defd3a13195c1c406"}, +] + [[package]] name = "pynacl" version = "1.6.2" @@ -6071,6 +6887,35 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pyreadline3" +version = "3.5.4" +description = "A python implementation of GNU readline." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, +] + +[package.extras] +dev = ["build", "flake8", "mypy", "pytest", "twine"] + +[[package]] +name = "pysocks" +version = "1.7.1" +description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, + {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, + {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, +] + [[package]] name = "pytest" version = "8.2.2" @@ -6264,6 +7109,22 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-digitalocean" +version = "1.17.0" +description = "digitalocean.com API to manage Droplets and Images" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "python-digitalocean-1.17.0.tar.gz", hash = "sha256:107854fde1aafa21774e8053cf253b04173613c94531f75d5a039ad770562b24"}, + {file = "python_digitalocean-1.17.0-py3-none-any.whl", hash = "sha256:0032168e022e85fca314eb3f8dfaabf82087f2ed40839eb28f1eeeeca5afb1fa"}, +] + +[package.dependencies] +jsonpickle = "*" +requests = "*" + [[package]] name = "python-memcached" version = "1.62" @@ -6316,7 +7177,6 @@ description = "Python for Window Extensions" optional = false python-versions = "*" groups = ["main", "dev"] -markers = "sys_platform == \"win32\"" files = [ {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"}, {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"}, @@ -6339,6 +7199,7 @@ files = [ {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"}, {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"}, ] +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} [[package]] name = "pyyaml" @@ -6479,6 +7340,7 @@ files = [ certifi = ">=2017.4.17" charset_normalizer = ">=2,<4" idna = ">=2.5,<4" +PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""} urllib3 = ">=1.21.1,<3" [package.extras] @@ -6537,7 +7399,7 @@ version = "14.1.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f"}, {file = "rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8"}, @@ -6836,14 +7698,14 @@ files = [ [[package]] name = "s3transfer" -version = "0.13.1" +version = "0.14.0" description = "An Amazon S3 Transfer 
Manager" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724"}, - {file = "s3transfer-0.13.1.tar.gz", hash = "sha256:c3fdba22ba1bd367922f27ec8032d6a1cf5f10c934fb5d68cf60fd5a23d936cf"}, + {file = "s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:ea3b790c7077558ed1f02a3072fb3cb992bbbd253392f4b6e9e8976941c7d456"}, + {file = "s3transfer-0.14.0.tar.gz", hash = "sha256:eff12264e7c8b4985074ccce27a3b38a485bb7f7422cc8046fee9be4983e4125"}, ] [package.dependencies] @@ -6907,6 +7769,38 @@ pydantic = "*" ruamel-yaml = ">=0.17.21" typing-extensions = ">=4.7.1" +[[package]] +name = "scaleway" +version = "2.10.3" +description = "Scaleway SDK for Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "scaleway-2.10.3-py3-none-any.whl", hash = "sha256:dbf381440d6caf37c878cf16445a63f4969a4aac2257c9b72c744d10ff223a0c"}, + {file = "scaleway-2.10.3.tar.gz", hash = "sha256:b1f9dd1b1450767205234c6f5a345e5e25dc039c780253d698893b5c344ce594"}, +] + +[package.dependencies] +scaleway-core = "2.10.3" + +[[package]] +name = "scaleway-core" +version = "2.10.3" +description = "Scaleway SDK for Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "scaleway_core-2.10.3-py3-none-any.whl", hash = "sha256:fd4112144554d6adae22ff737555eeb0e38cb1063250b3e88c9aebc1b957793b"}, + {file = "scaleway_core-2.10.3.tar.gz", hash = "sha256:56432f755d694669429de51d51c1d0b3361b28dc2f939b28e4cb954610ee76be"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.2,<3.0.0" +PyYAML = ">=6.0,<7.0" +requests = ">=2.28.1,<3.0.0" + [[package]] name = "schema" version = "0.7.5" @@ -7007,7 +7901,7 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -7046,18 +7940,18 @@ files = [ [[package]] name = "slack-sdk" -version = "3.34.0" +version = "3.39.0" description = "The Slack API Platform SDK for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "slack_sdk-3.34.0-py2.py3-none-any.whl", hash = "sha256:c61f57f310d85be83466db5a98ab6ae3bb2e5587437b54fa0daa8fae6a0feffa"}, - {file = "slack_sdk-3.34.0.tar.gz", hash = "sha256:ff61db7012160eed742285ea91f11c72b7a38a6500a7f6c5335662b4bc6b853d"}, + {file = "slack_sdk-3.39.0-py2.py3-none-any.whl", hash = "sha256:b1556b2f5b8b12b94e5ea3f56c4f2c7f04462e4e1013d325c5764ff118044fa8"}, + {file = "slack_sdk-3.39.0.tar.gz", hash = "sha256:6a56be10dc155c436ff658c6b776e1c082e29eae6a771fccf8b0a235822bbcb1"}, ] [package.extras] -optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<15)"] +optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<16)"] [[package]] name = "sniffio" @@ -7087,6 +7981,18 @@ files = [ dev = ["build"] doc = ["sphinx"] +[[package]] +name = "statsd" +version = "4.0.1" +description = "A simple statsd client." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "statsd-4.0.1-py2.py3-none-any.whl", hash = "sha256:c2676519927f7afade3723aca9ca8ea986ef5b059556a980a867721ca69df093"}, + {file = "statsd-4.0.1.tar.gz", hash = "sha256:99763da81bfea8daf6b3d22d11aaccb01a8d0f52ea521daab37e758a4ca7d128"}, +] + [[package]] name = "std-uritemplate" version = "2.0.5" @@ -7207,7 +8113,7 @@ version = "0.16.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "typer-0.16.1-py3-none-any.whl", hash = "sha256:90ee01cb02d9b8395ae21ee3368421faf21fa138cb2a541ed369c08cec5237c9"}, {file = "typer-0.16.1.tar.gz", hash = "sha256:d358c65a464a7a90f338e3bb7ff0c74ac081449e53884b12ba658cbd72990614"}, @@ -7219,6 +8125,21 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-aiobotocore-ecr" +version = "3.0.0" +description = "Type annotations for aiobotocore ECR 3.0.0 service generated with mypy-boto3-builder 8.12.0" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "types_aiobotocore_ecr-3.0.0-py3-none-any.whl", hash = "sha256:06288369b9ddf78661224ac99a61aefe3c8a49e872d5c7a1626435ea848a817e"}, + {file = "types_aiobotocore_ecr-3.0.0.tar.gz", hash = "sha256:a9f49aa3c83c6b6ab1cc7f10cc887ca35a549e0a29dfcdab40b285ce0846d06c"}, +] + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.12\""} + [[package]] name = "typing-extensions" version = "4.14.1" @@ -7564,6 +8485,21 @@ files = [ [package.dependencies] lxml = ">=3.8" +[[package]] +name = "xmltodict" +version = "1.0.2" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d"}, + {file = "xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649"}, +] + +[package.extras] +test = ["pytest", "pytest-cov"] + [[package]] name = "yarl" version = "1.20.1" @@ -7767,141 +8703,179 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [[package]] name = "zstd" -version = "1.5.7.2" +version = "1.5.7.3" description = "ZSTD Bindings for Python" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "zstd-1.5.7.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e17104d0e88367a7571dde4286e233126c8551691ceff11f9ae2e3a3ac1bb483"}, - {file = "zstd-1.5.7.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:d6ee5dfada4c8fa32f43cc092fcf7d8482da6ad242c22fdf780f7eebd0febcc7"}, - {file = "zstd-1.5.7.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:ae1100776cb400100e2d2f427b50dc983c005c38cd59502eb56d2cfea3402ad5"}, - {file = "zstd-1.5.7.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:489a0ff15caf7640851e63f85b680c4279c99094cd500a29c7ed3ab82505fce0"}, - {file = "zstd-1.5.7.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:92590cf54318849d492445c885f1a42b9dbb47cdc070659c7cb61df6e8531047"}, - {file = "zstd-1.5.7.2-cp27-cp27mu-manylinux_2_4_i686.whl", hash = "sha256:2bc21650f7b9c058a3c4cb503e906fe9cce293941ec1b48bc5d005c3b4422b42"}, - {file = "zstd-1.5.7.2-cp27-cp27mu-manylinux_2_4_x86_64.whl", hash = "sha256:7b13e7eef9aa192804d38bf413924d347c6f6c6ac07f5a0c1ae4a6d7b3af70f0"}, - {file = 
"zstd-1.5.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d3f14c5c405ea353b68fe105236780494eb67c756ecd346fd295498f5eab6d24"}, - {file = "zstd-1.5.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07d2061df22a3efc06453089e6e8b96e58f5bb7a0c4074dcfd0b0ce243ddde72"}, - {file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:27e55aa2043ba7d8a08aba0978c652d4d5857338a8188aa84522569f3586c7bb"}, - {file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e97933addfd71ea9608306f18dc18e7d2a5e64212ba2bb9a4ccb6d714f9f280"}, - {file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_4_i686.whl", hash = "sha256:27e2ed58b64001c9ef0a8e028625477f1a6ed4ca949412ff6548544945cc59c2"}, - {file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_4_x86_64.whl", hash = "sha256:92f072819fc0c7e8445f51a232c9ad76642027c069d2f36470cdb5e663839cdb"}, - {file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2a653cdd2c52d60c28e519d44bde8d759f2c1837f0ff8e8e1b0045ca62fcf70e"}, - {file = "zstd-1.5.7.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:047803d87d910f4905f48d99aeff1e0539ec2e4f4bf17d077701b5d0b2392a95"}, - {file = "zstd-1.5.7.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0d8c1dc947e5ccea3bd81043080213685faf1d43886c27c51851fabf325f05c0"}, - {file = "zstd-1.5.7.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8291d393321fac30604c6bbf40067103fee315aa476647a5eaecf877ee53496f"}, - {file = "zstd-1.5.7.2-cp310-cp310-win32.whl", hash = "sha256:6922ceac5f2d60bb57a7875168c8aa442477b83e8951f2206cf1e9be788b0a6e"}, - {file = "zstd-1.5.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:346d1e4774d89a77d67fc70d53964bfca57c0abecfd885a4e00f87fd7c71e074"}, - {file = "zstd-1.5.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f799c1e9900ad77e7a3d994b9b5146d7cfd1cbd1b61c3db53a697bf21ffcc57b"}, - {file = "zstd-1.5.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ff4c667f29101566a7b71f06bbd677a63192818396003354131f586383db042"}, - {file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8526a32fa9f67b07fd09e62474e345f8ca1daf3e37a41137643d45bd1bc90773"}, - {file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:2cec2472760d48a7a3445beaba509d3f7850e200fed65db15a1a66e315baec6a"}, - {file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_4_i686.whl", hash = "sha256:a200c479ee1bb661bc45518e016a1fdc215a1d8f7e4bf6c7de0af254976cfdf6"}, - {file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_4_x86_64.whl", hash = "sha256:f5d159e57a13147aa8293c0f14803a75e9039fd8afdf6cf1c8c2289fb4d2333a"}, - {file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:7206934a2bd390080e972a1fed5a897e184dfd71dbb54e978dc11c6b295e1806"}, - {file = "zstd-1.5.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e0027b20f296d1c9a8e85b8436834cf46560240a29d623aa8eaa8911832eb58"}, - {file = "zstd-1.5.7.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d6b17e5581dd1a13437079bd62838d2635db8eb8aca9c0e9251faa5d4d40a6d7"}, - {file = "zstd-1.5.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b13285c99cc710f60dd270785ec75233018870a1831f5655d862745470a0ca29"}, - {file = "zstd-1.5.7.2-cp311-cp311-win32.whl", hash = "sha256:cdb5ec80da299f63f8aeccec0bff3247e96252d4c8442876363ff1b438d8049b"}, - {file = 
"zstd-1.5.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:4f6861c8edceb25fda37cdaf422fc5f15dcc88ced37c6a5b3c9011eda51aa218"}, - {file = "zstd-1.5.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ebe3e60dbace52525fa7aa604479e231dc3e4fcc76d0b4c54d8abce5e58734"}, - {file = "zstd-1.5.7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ef201b6f7d3a6751d85cc52f9e6198d4d870e83d490172016b64a6dd654a9583"}, - {file = "zstd-1.5.7.2-cp312-cp312-manylinux_2_14_x86_64.whl", hash = "sha256:ac7bdfedda51b1fcdcf0ab69267d01256fc97ddf666ce894fde0fae9f3630eac"}, - {file = "zstd-1.5.7.2-cp312-cp312-manylinux_2_4_i686.whl", hash = "sha256:b835405cc4080b378e45029f2fe500e408d1eaedfba7dd7402aba27af16955f9"}, - {file = "zstd-1.5.7.2-cp312-cp312-win32.whl", hash = "sha256:e4cf97bb97ed6dbb62d139d68fd42fa1af51fd26fd178c501f7b62040e897c50"}, - {file = "zstd-1.5.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:55e2edc4560a5cf8ee9908595e90a15b1f47536ea9aad4b2889f0e6165890a38"}, - {file = "zstd-1.5.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6e684e27064b6550aa2e7dc85d171ea1b62cb5930a2c99b3df9b30bf620b5c06"}, - {file = "zstd-1.5.7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd6262788a98807d6b2befd065d127db177c1cd76bb8e536e0dded419eb7c7fb"}, - {file = "zstd-1.5.7.2-cp313-cp313-manylinux_2_14_x86_64.whl", hash = "sha256:53948be45f286a1b25c07a6aa2aca5c902208eb3df9fe36cf891efa0394c8b71"}, - {file = "zstd-1.5.7.2-cp313-cp313-win32.whl", hash = "sha256:edf816c218e5978033b7bb47dcb453dfb71038cb8a9bf4877f3f823e74d58174"}, - {file = "zstd-1.5.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:eea9bddf06f3f5e1e450fd647665c86df048a45e8b956d53522387c1dff41b7a"}, - {file = "zstd-1.5.7.2-cp313-cp313t-manylinux_2_14_x86_64.whl", hash = "sha256:1d71f9f92b3abe18b06b5f0aefa5b9c42112beef3bff27e36028d147cb4426a6"}, - {file = "zstd-1.5.7.2-cp314-cp314-manylinux_2_14_x86_64.whl", hash = "sha256:a6105b8fa21dbc59e05b6113e8e5d5aaf56c5d2886aa5778d61030af3256bbb7"}, - {file = "zstd-1.5.7.2-cp314-cp314t-manylinux_2_14_x86_64.whl", hash = "sha256:d0b0ca097efb5f67157c61a744c926848dcccf6e913df2f814e719aa78197a4b"}, - {file = "zstd-1.5.7.2-cp34-cp34m-manylinux_2_4_i686.whl", hash = "sha256:a371274668182ae06be2e321089b207fa0a75a58ae2fd4dfb7eafded9e041b2f"}, - {file = "zstd-1.5.7.2-cp34-cp34m-manylinux_2_4_x86_64.whl", hash = "sha256:74c3f006c9a3a191ed454183f0fb78172444f5cb431be04d85044a27f1b58c7b"}, - {file = "zstd-1.5.7.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:f19a3e658d92b6b52020c4c6d4c159480bcd3b47658773ea0e8d343cee849f33"}, - {file = "zstd-1.5.7.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d9d1bcb6441841c599883139c1b0e47bddb262cce04b37dc2c817da5802c1158"}, - {file = "zstd-1.5.7.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:bb1cb423fc40468cc9b7ab51a5b33c618eefd2c910a5bffed6ed76fe1cbb20b0"}, - {file = "zstd-1.5.7.2-cp35-cp35m-manylinux_2_14_x86_64.whl", hash = "sha256:e2476ba12597e58c5fc7a3ae547ee1bef9dd6b9d5ea80cf8d4034930c5a336e0"}, - {file = "zstd-1.5.7.2-cp35-cp35m-manylinux_2_4_i686.whl", hash = "sha256:2bf6447373782a2a9df3015121715f6d0b80a49a884c2d7d4518c9571e9fca16"}, - {file = "zstd-1.5.7.2-cp35-cp35m-win32.whl", hash = "sha256:a59a136a9eaa1849d715c004e30344177e85ad6e7bc4a5d0b6ad2495c5402675"}, - {file = "zstd-1.5.7.2-cp35-cp35m-win_amd64.whl", hash = "sha256:114115af8c68772a3205414597f626b604c7879f6662a2a79c88312e0f50361f"}, - {file = "zstd-1.5.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f576ec00e99db124309dac1e1f34bc320eb69624189f5fdaf9ebe1dc81581a84"}, - {file = 
"zstd-1.5.7.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f97d8593da0e23a47f148a1cb33300dccd513fb0df9f7911c274e228a8c1a300"}, - {file = "zstd-1.5.7.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a130243e875de5aeda6099d12b11bc2fcf548dce618cf6b17f731336ba5338e4"}, - {file = "zstd-1.5.7.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:73cec37649fda383348dc8b3b5fba535f1dbb1bbaeb60fd36f4c145820208619"}, - {file = "zstd-1.5.7.2-cp36-cp36m-manylinux_2_14_x86_64.whl", hash = "sha256:883e7b77a3124011b8badd0c7c9402af3884700a3431d07877972e157d85afb8"}, - {file = "zstd-1.5.7.2-cp36-cp36m-manylinux_2_4_i686.whl", hash = "sha256:b5af6aa041b5515934afef2ef4af08566850875c3c890109088eedbe190eeefb"}, - {file = "zstd-1.5.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:53abf577aec7b30afa3c024143f4866676397c846b44f1b30d8097b5e4f5c7d7"}, - {file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:660945ba16c16957c94dafc40aff1db02a57af0489aa3a896866239d47bb44b0"}, - {file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:3e220d2d7005822bb72a52e76410ca4634f941d8062c08e8e3285733c63b1db7"}, - {file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_4_i686.whl", hash = "sha256:7e998f86a9d1e576c0158bf0b0a6a5c4685679d74ba0053a2e87f684f9bdc8eb"}, - {file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_4_x86_64.whl", hash = "sha256:70d0c4324549073e05aa72e9eb6a593f89cba59da804b946d325d68467b93ad5"}, - {file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:b9518caabf59405eddd667bbb161d9ae7f13dbf96967fd998d095589c8d41c86"}, - {file = "zstd-1.5.7.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:30d339d8e5c4b14c2015b50371fcdb8a93b451ca6d3ef813269ccbb8b3b3ef7d"}, - {file = "zstd-1.5.7.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:6f5539a10b838ee576084870eed65b63c13845e30a5b552cfe40f7e6b621e61a"}, - {file = "zstd-1.5.7.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:5540ce1c99fa0b59dad2eff771deb33872754000da875be50ac8c2beab42b433"}, - {file = "zstd-1.5.7.2-cp37-cp37m-win32.whl", hash = "sha256:56c4b8cd0a88fd721213661c28b87b64fbd14b6019df39b21b0117a68162b0f2"}, - {file = "zstd-1.5.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:594f256fa72852ade60e3acb909f983d5cf6839b9fc79728dd4b48b31112058f"}, - {file = "zstd-1.5.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dc05618eb0abceb296b77e5f608669c12abc69cbf447d08151bcb14d290ab07"}, - {file = "zstd-1.5.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:70231ba799d681b6fc17456c3e39895c493b5dff400aa7842166322a952b7f2a"}, - {file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5a73f0f20f71d4eef970a3fed7baac64d9a2a00b238acc4eca2bd7172bd7effb"}, - {file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0a470f8938f69f632b8f88b96578a5e8825c18ddbbea7de63493f74874f963ef"}, - {file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_4_i686.whl", hash = "sha256:d104f1cb2a7c142007c29a2a62dfe633155c648317a465674e583c295e5f792d"}, - {file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_4_x86_64.whl", hash = "sha256:70f29e0504fc511d4b9f921e69637fca79c050e618ba23732a3f75c044814d89"}, - {file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:a62c2f6f7b8fc69767392084828740bd6faf35ff54d4ccb2e90e199327c64140"}, - {file = 
"zstd-1.5.7.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f2dda0c76f87723fb7f75d7ad3bbd90f7fb47b75051978d22535099325111b41"}, - {file = "zstd-1.5.7.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f9cf09c2aa6f67750fe9f33fdd122f021b1a23bf7326064a8e21f7af7e77faee"}, - {file = "zstd-1.5.7.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:910bd9eac2488439f597504756b03c74aa63ed71b21e5d0aa2c7e249b3f1c13f"}, - {file = "zstd-1.5.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9838ec7eb9f1beb2f611b9bcac7a169cb3de708ccf779aead29787e4482fe232"}, - {file = "zstd-1.5.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:83a36bb1fd574422a77b36ccf3315ab687aef9a802b0c3312ca7006b74eeb109"}, - {file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6f8189bc58415758bbbd419695012194f5e5e22c34553712d9a3eb009c09808d"}, - {file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:632e3c1b7e1ebb0580f6d92b781a8f7901d367cf72725d5642e6d3a32e404e45"}, - {file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_4_i686.whl", hash = "sha256:df8083c40fdbfe970324f743f0b5ecc244c37736e5f3ad2670de61dde5e0b024"}, - {file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_4_x86_64.whl", hash = "sha256:300db1ede4d10f8b9b3b99ca52b22f0e2303dc4f1cf6994d1f8345ce22dd5a7e"}, - {file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:97b908ccb385047b0c020ce3dc55e6f51078c9790722fdb3620c076be4a69ecf"}, - {file = "zstd-1.5.7.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c59218bd36a7431a40591504f299de836ea0d63bc68ea76d58c4cf5262f0fa3c"}, - {file = "zstd-1.5.7.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4d5a85344193ec967d05da8e2c10aed400e2d83e16041d2fdfb713cfc8caceeb"}, - {file = "zstd-1.5.7.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ebf6c1d7f0ceb0af5a383d2a1edc8ab9ace655e62a41c8a4ed5a031ee2ef8006"}, - {file = "zstd-1.5.7.2-cp39-cp39-win32.whl", hash = "sha256:44a5142123d59a0dbbd9ba9720c23521be57edbc24202223a5e17405c3bdd4a6"}, - {file = "zstd-1.5.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dc542a9818712a9fb37563fa88cdbbbb2b5f8733111d412b718fa602b83ba45"}, - {file = "zstd-1.5.7.2-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:24371a7b0475eef7d933c72067d363c5dc17282d2aa5d4f5837774378718509e"}, - {file = "zstd-1.5.7.2-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:c21d44981b068551f13097be3809fadb7f81617d0c21b2c28a7d04653dde958f"}, - {file = "zstd-1.5.7.2-pp27-pypy_73-manylinux_2_14_x86_64.whl", hash = "sha256:b011bf4cfad78cdf9116d6731234ff181deb9560645ffdcc8d54861ae5d1edfc"}, - {file = "zstd-1.5.7.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:426e5c6b7b3e2401b734bfd08050b071e17c15df5e3b31e63651d1fd9ba4c751"}, - {file = "zstd-1.5.7.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:53375b23f2f39359ade944169bbd88f8895eed91290ee608ccbc28810ac360ba"}, - {file = "zstd-1.5.7.2-pp310-pypy310_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:1b301b2f9dbb0e848093127fb10cbe6334a697dc3aea6740f0bb726450ee9a34"}, - {file = "zstd-1.5.7.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5414c9ae27069ab3ec8420fe8d005cb1b227806cbc874a7b4c73a96b4697a633"}, - {file = "zstd-1.5.7.2-pp311-pypy311_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:5fb2ff5718fe89181223c23ce7308bd0b4a427239379e2566294da805d8df68a"}, - {file = "zstd-1.5.7.2-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = 
"sha256:9714d5642867fceb22e4ab74aebf81a2e62dc9206184d603cb39277b752d5885"}, - {file = "zstd-1.5.7.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:6584fd081a6e7d92dffa8e7373d1fced6b3cbf473154b82c17a99438c5e1de51"}, - {file = "zstd-1.5.7.2-pp36-pypy36_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:52f27a198e2a72632bae12ec63ebaa31b10e3d5f3dd3df2e01376979b168e2e6"}, - {file = "zstd-1.5.7.2-pp36-pypy36_pp73-win32.whl", hash = "sha256:3b14793d2a2cb3a7ddd1cf083321b662dd20bc11143abc719456e9bfd22a32aa"}, - {file = "zstd-1.5.7.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:faf3fd38ba26167c5a085c04b8c931a216f1baf072709db7a38e61dea52e316e"}, - {file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:d17ac6d2584168247796174e599d4adbee00153246287e68881efaf8d48a6970"}, - {file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9a24d492c63555b55e6bc73a9e82a38bf7c3e8f7cde600f079210ed19cb061f2"}, - {file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c6abf4ab9a9d1feb14bc3cbcc32d723d340ce43b79b1812805916f3ac069b073"}, - {file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:d7131bb4e55d075cb7847555a1e17fca5b816a550c9b9ac260c01799b6f8e8d9"}, - {file = "zstd-1.5.7.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a03608499794148f39c932c508d4eb3622e79ca2411b1d0438a2ee8cafdc0111"}, - {file = "zstd-1.5.7.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:86e64c71b4d00bf28be50e4941586e7874bdfa74858274d9f7571dd5dda92086"}, - {file = "zstd-1.5.7.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0f79492bf86aef6e594b11e29c5589ddd13253db3ada0c7a14fb176b132fb65e"}, - {file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:8c3f4bb8508bc54c00532931da4a5261f08493363da14a5526c986765973e35d"}, - {file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:787bcf55cefc08d27aca34c6dcaae1a24940963d1a73d4cec894ee458c541ac4"}, - {file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f97f872cb78a4fd60b6c1024a65a4c52a971e9d991f33c7acd833ee73050f85"}, - {file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:5e530b75452fdcff4ea67268d9e7cb37a38e7abbac84fa845205f0b36da81aaf"}, - {file = "zstd-1.5.7.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7c1cc65fc2789dd97a98202df840537de186ed04fd1804a17fcb15d1232442c4"}, - {file = "zstd-1.5.7.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:05604a693fa53b60ca083992324b08dafd15a4ac37ac4cffe4b43b9eb93d4440"}, - {file = "zstd-1.5.7.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:baf4e8b46d8934d4e85373f303eb048c63897fc4191d8ab301a1bbdf30b7a3cc"}, - {file = "zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:8cc35cc25e2d4a0f68020f05cba96912a2881ebaca890d990abe37aa3aa27045"}, - {file = "zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:ceae57e369e1b821b8f2b4c59bc08acd27d8e4bf9687bfa5211bc4cdb080fe7b"}, - {file = "zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5189fb44c44ab9b6c45f734bd7093a67686193110dc90dcfaf0e3a31b2385f38"}, - {file = 
"zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:f51a965871b25911e06d421212f9be7f7bcd3cedc43ea441a8a73fad9952baa0"}, - {file = "zstd-1.5.7.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:624022851c51dd6d6b31dbfd793347c4bd6339095e8383e2f74faf4f990b04c6"}, - {file = "zstd-1.5.7.2.tar.gz", hash = "sha256:6d8684c69009be49e1b18ec251a5eb0d7e24f93624990a8a124a1da66a92fc8a"}, + {file = "zstd-1.5.7.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e72b353870286648a63261437b75f297e2967a26f210da4dfa4c08949935de7a"}, + {file = "zstd-1.5.7.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:26aff5f24caeffde35f1b757499e935bc60a8e0d9e1ea8bde05dcf7d53df9325"}, + {file = "zstd-1.5.7.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:586a820fbd06e3d9a9d9def572e779254bf8dee7406b8c6dc44eff6807d60c6d"}, + {file = "zstd-1.5.7.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:35a147b10fd16ebb3a2595e361780388feb8f336d70772a05dfb7a8348a47bfd"}, + {file = "zstd-1.5.7.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:c2a80c51e2175ffcd6f08b2a4c9fbc121aad69fbbcebb3364e783a96d0488fda"}, + {file = "zstd-1.5.7.3-cp27-cp27mu-manylinux_2_4_i686.whl", hash = "sha256:5f20f74a782f3296d1585d9bbc49d422e339b154c66398c74537e433446c51ba"}, + {file = "zstd-1.5.7.3-cp27-cp27mu-manylinux_2_4_x86_64.whl", hash = "sha256:2550c2e6bfbff0904f28821005f176bfdaec1872d60053665a284fb0254a10e7"}, + {file = "zstd-1.5.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:76f3535616887a1a38e8c6d0de693a23c5bb1f190651eb20d96bfc8e4ab706a0"}, + {file = "zstd-1.5.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67507937e8e4c2a8dfed8e7fa77f4043ec9e6e831a5faebf0f99138b1a25ccbd"}, + {file = "zstd-1.5.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd0a2309c524608ce7b940abcc9f8eb5447c6ea2c834a630e0081211ab9d40ec"}, + {file = "zstd-1.5.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:2b497306580d544406b5414c8485c4037a9283ad2ca6ae4ccdf3732c9563141d"}, + {file = "zstd-1.5.7.3-cp310-cp310-manylinux_2_4_i686.whl", hash = "sha256:e9939a98ea946d1f9e8f9fecc940ae939b8e9e5ef9d71b104f7843567d764f30"}, + {file = "zstd-1.5.7.3-cp310-cp310-manylinux_2_4_x86_64.whl", hash = "sha256:d32c0fe8f6b805b7cbeaade462b094a843e84d893d8c6f66ab705e8777cc1850"}, + {file = "zstd-1.5.7.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:8aa33b1ef24602b2ef1e8aa67ea3c8f821854a4dbf70c3c8c46b96b54b6ceb5d"}, + {file = "zstd-1.5.7.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1bd69fa9c4c97fd04206c919dedbf9f75f544ebb77880db51a13c1e3802cd655"}, + {file = "zstd-1.5.7.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:aee96742a64ede2e35dc0316ef0cd1e50089e889ce77e82ca8edf40174a1439c"}, + {file = "zstd-1.5.7.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ac207573d2815a51f4f4fd4e255408396491729a01f690b9f5fb672d39e5610"}, + {file = "zstd-1.5.7.3-cp310-cp310-win32.whl", hash = "sha256:04e62e4f9eba79699d072d3c96731ed4aff99f1d334eb967489b091186a6078f"}, + {file = "zstd-1.5.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:0794b23b9950af240888087d2bd5943aa4be67273ba32cdafabdc5704778b90e"}, + {file = "zstd-1.5.7.3-cp310-cp310-win_arm64.whl", hash = "sha256:7827fd4901f3e71a7a755d26719549658f08e04fdf0870a952ed08e71b484435"}, + {file = "zstd-1.5.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a3c1781a24e2ced2c0ddee11d45b1f04018b03615eeb622a62eca4d56d3358a"}, + 
{file = "zstd-1.5.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c7c81056362b60a04baa34632e713d596662a860ec34efd8e9b109c10e6ec7"}, + {file = "zstd-1.5.7.3-cp311-cp311-manylinux_2_14_x86_64.whl", hash = "sha256:e564f34a55effc7d654eb293468edc80b64d476b0f899f82760ecd8323223ff5"}, + {file = "zstd-1.5.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:fbc49a57188184931d5e3c9f1133cad7eea5a370a9e9418fb8122d58c14340a5"}, + {file = "zstd-1.5.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d121d3e63722819e1fe5effbcd9628d8a7cfea0cddabcc5bb37ea861a6a83424"}, + {file = "zstd-1.5.7.3-cp311-cp311-manylinux_2_4_i686.whl", hash = "sha256:621f2e7ca8e9eb52a83eb9c91ec3cd283d87591bf75cc658de486b65f44742c7"}, + {file = "zstd-1.5.7.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:c1950fcae690ba32d0f31702b335c548fb42547821565925e48576afdad774a5"}, + {file = "zstd-1.5.7.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bac4f0d03da69115878bedbfa03c4a3f64364e8396b432028c4ce0f05141a0fb"}, + {file = "zstd-1.5.7.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:da0ab134b7fd28023dedf013751ca850de300a090eb11f689d2a1c178c87d9dc"}, + {file = "zstd-1.5.7.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b9923175842ee8f7602ec9cc578f5fc396896f0e8460d3ac9a5adc3cea77244e"}, + {file = "zstd-1.5.7.3-cp311-cp311-win32.whl", hash = "sha256:0612b604948d7b58aecc6788c7ceb53c5f21d94a155bb6ea9bd0f54ffa43725d"}, + {file = "zstd-1.5.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:5b7f8c81b2bd3b62c0345242247d484cafa4b518d59d18619813d9225af5c5c3"}, + {file = "zstd-1.5.7.3-cp311-cp311-win_arm64.whl", hash = "sha256:ea112e3acd9e1765adca35df7b54ac75b36194290f64ea03a3a59664209c8527"}, + {file = "zstd-1.5.7.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01a39efb0eeab7cc45cb308618233b624b0840d5e16dcf85456b6cca0592f203"}, + {file = "zstd-1.5.7.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7a8e8838cf35fa3987bfe1958584cc22e1797efce8e155a63544b4144fc671f8"}, + {file = "zstd-1.5.7.3-cp312-cp312-manylinux_2_14_i686.whl", hash = "sha256:f3920ac1d1cc7e9f252f3e29f217fe3cd36f2191bb3dbcae826c29e189b7ad54"}, + {file = "zstd-1.5.7.3-cp312-cp312-manylinux_2_14_x86_64.whl", hash = "sha256:143f9062953fb5590cbd47c1040d357336742c79696bf90b6d5b835279a68304"}, + {file = "zstd-1.5.7.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d1fd8647e47e1f21b345e192f1a279e925678c23dad8236b547d04456cd699"}, + {file = "zstd-1.5.7.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1538db419afa62773cf534fc7f3009ff59ecf55ecee4e889587ac2ef0010ed8"}, + {file = "zstd-1.5.7.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c5efd16adb092e2a547a7d51cfdaf6fd5680528227684c5bafc7669ab4a55f41"}, + {file = "zstd-1.5.7.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:39b3438e64637d80a5b1860526903b92020acb9bae9ceb5adffd9838c1441328"}, + {file = "zstd-1.5.7.3-cp312-cp312-win32.whl", hash = "sha256:cbf48c53461e224ffc2490cfe5120a1ff40d14c84d2b512c6d6d99fc91685cf3"}, + {file = "zstd-1.5.7.3-cp312-cp312-win_amd64.whl", hash = "sha256:943a189910f2fea997462e3e4d7fbf727a06d231ef801ebee557b1c87568981c"}, + {file = "zstd-1.5.7.3-cp312-cp312-win_arm64.whl", hash = "sha256:85c4d508f8109afa7c51c4960626c3325af2cf1e442c6c36ebfea15d04757e3f"}, + {file = "zstd-1.5.7.3-cp313-cp313-macosx_10_13_x86_64.whl", 
hash = "sha256:b2455e56f1d265dacbd450510b8c2f632a5d8d92c23282e7723fb04af37001a2"}, + {file = "zstd-1.5.7.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3486dc4f1b4e52bb059f8eec1f31daa3e540062c0f522f221782cf132a8bc9a8"}, + {file = "zstd-1.5.7.3-cp313-cp313-manylinux_2_14_i686.whl", hash = "sha256:1cb47bf10ffcb6a782edacfe758da2c94879f7e89c6628feb3f1254daf8cc596"}, + {file = "zstd-1.5.7.3-cp313-cp313-manylinux_2_14_x86_64.whl", hash = "sha256:07b1378d1230ddeea8773f99d7518a3060e6468c76edd502057cb795fe278d7e"}, + {file = "zstd-1.5.7.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ee34317f013e3405108f5baea53502159809cfc4510598d614257525500c70d"}, + {file = "zstd-1.5.7.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c19127ca2c79855376a34a2d7a6969408094b25c1f44485b0373eba4be851b98"}, + {file = "zstd-1.5.7.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2e79cae70dd08cb247391312463085c624c0302e8c860d13f87f4c76502d8202"}, + {file = "zstd-1.5.7.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0e83e91e5daf89037c737f5529da0f80da80a78a6ad0b1d70a09860eb267dea4"}, + {file = "zstd-1.5.7.3-cp313-cp313-win32.whl", hash = "sha256:2283f3bb910c028e1b9fe76b834016012ab021025a0ea197e27a1333f85e3031"}, + {file = "zstd-1.5.7.3-cp313-cp313-win_amd64.whl", hash = "sha256:3ad5fe4c36bab5dfa5a4b8d050bd07c50c1e69f94d381bc65337ab14cd69e5b1"}, + {file = "zstd-1.5.7.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e878172b0eb69ac2edc6576eb862e00747c7c25e638fb354630a1ea7cfddf49"}, + {file = "zstd-1.5.7.3-cp313-cp313t-manylinux_2_14_x86_64.whl", hash = "sha256:7e0a7e94d5b63b4cacf2396079ca9584d11f49f87cb4e5aa21f126a8f6b83446"}, + {file = "zstd-1.5.7.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:5412c86c34cbaf6906433ef3f2c96c407f208782f06cd3e5f01f066788adb3b8"}, + {file = "zstd-1.5.7.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f94246befb1e473211a298c96e5768f3c63eaad814ac14d160d79ae9858e1d03"}, + {file = "zstd-1.5.7.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31050e17a1a546fb82c90eee8ee3c30d22b9d0594b5937e69d38b7a5084af2a2"}, + {file = "zstd-1.5.7.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8ba8ec5dfd48c86d19f880713246f85d09ee06e8cd17141956258650878000d6"}, + {file = "zstd-1.5.7.3-cp314-cp314-manylinux_2_14_i686.whl", hash = "sha256:3005540ba406157f3e205c998709ab5f8e68b390c658c7c238eb8986092089d5"}, + {file = "zstd-1.5.7.3-cp314-cp314-manylinux_2_14_x86_64.whl", hash = "sha256:3934b54a3b7df039fcd4cf7b0f0a38c86ce44d26321255ffc3fac73d6cdcc59d"}, + {file = "zstd-1.5.7.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e9230cd3e9153e2bed16f332558f8f3f7d869f4d15e8fa3f9c360bfa163a8b4a"}, + {file = "zstd-1.5.7.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5bffba70af539f14f9df5367b1add9119f14d5e35b658aef7b765417ea461e0e"}, + {file = "zstd-1.5.7.3-cp314-cp314-win32.whl", hash = "sha256:a006e70c88ab67bb56989e11d820adc7601a6a7ad5558b3c6c690b19a1dadc5b"}, + {file = "zstd-1.5.7.3-cp314-cp314-win_amd64.whl", hash = "sha256:cb4957c330c7b94b0546c7b9529723b49e865608683b9503a251fe793da9d4db"}, + {file = "zstd-1.5.7.3-cp314-cp314-win_arm64.whl", hash = "sha256:a785426081ab7cafe4522876ac771d701766deea9a6d8352e87744da00e6637f"}, + {file = "zstd-1.5.7.3-cp314-cp314t-manylinux_2_14_i686.whl", hash = "sha256:b52ef154793be0399befd742328ec6f5dff95154248d6d18dd65851cf22a1a5f"}, + {file 
= "zstd-1.5.7.3-cp314-cp314t-manylinux_2_14_x86_64.whl", hash = "sha256:8024a8ba9156b1b2e64e69d147df5ddedeaed107f9da02a3428fd7baf3e5b920"}, + {file = "zstd-1.5.7.3-cp315-cp315-manylinux_2_14_i686.whl", hash = "sha256:31ac7fbacca4759aad4b6abc13bbc05e68788e9e85a968255f7624b3b8db31df"}, + {file = "zstd-1.5.7.3-cp315-cp315-manylinux_2_14_x86_64.whl", hash = "sha256:d03b2927c5843ded4d1319836a33a9c21675d2f86f916a2f234a060d4c67d87c"}, + {file = "zstd-1.5.7.3-cp315-cp315t-manylinux_2_14_i686.whl", hash = "sha256:5dfbf2564eb574fc1f45613ecf28036a82533c3dd70e7bb1c9854168c638da7a"}, + {file = "zstd-1.5.7.3-cp315-cp315t-manylinux_2_14_x86_64.whl", hash = "sha256:7f2f5776b902f41daf7b63e75a9384b0d7c855f824f14dabefc67814b8fa5611"}, + {file = "zstd-1.5.7.3-cp34-cp34m-manylinux_2_4_i686.whl", hash = "sha256:ffbeabcabcb644d29289277f9023aa51c04de71935695f5388da9c8428c81e0f"}, + {file = "zstd-1.5.7.3-cp34-cp34m-manylinux_2_4_x86_64.whl", hash = "sha256:0b891ca9ad84562941367ab7be817b8748df75eb6b7ced23d5b082b4602c1c6e"}, + {file = "zstd-1.5.7.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:925f83e2e749cd7109985bc96835cd2fd814435d74f0d9a1d7c8506166e97592"}, + {file = "zstd-1.5.7.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:57d2ff6b96886aaec2aa4721f7c8e890a8b43b5c4ae4f3737a0733b55cd82daa"}, + {file = "zstd-1.5.7.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:8cd516ba02e0f9e6df1b4a6dc0cd5e66ac6eeb55b15833a70d529aa32eddaa91"}, + {file = "zstd-1.5.7.3-cp35-cp35m-manylinux_2_14_x86_64.whl", hash = "sha256:9f6ea980866f43ff7ef5e41eac54b94f9159b9807f32f691b02ca381b50b76af"}, + {file = "zstd-1.5.7.3-cp35-cp35m-manylinux_2_4_i686.whl", hash = "sha256:3e650ed68b655d55556099aa62f168a352396139a879a94312322a1d02502491"}, + {file = "zstd-1.5.7.3-cp35-cp35m-win32.whl", hash = "sha256:da88b288a2844f04713df89a514dd9dc0e925ee63e119c845aef14ccbcc9183e"}, + {file = "zstd-1.5.7.3-cp35-cp35m-win_amd64.whl", hash = "sha256:96c949e8508f2d4dced3444a3bfb99d51653ac6f28ef0aa1561f5758adc8afed"}, + {file = "zstd-1.5.7.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7509b11b5f8313e87cce16269e222f89e7e49b51f1e6a3e7454b7c7b599d3211"}, + {file = "zstd-1.5.7.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fb8aafd47ba73ff50a7994668dbec5c97f26ddcd28c03242d8f8b4138d8c723c"}, + {file = "zstd-1.5.7.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:586efc62d7e93d52d0b3951ef48a4b5181866152061bda1bef49f7ea85ec0d7f"}, + {file = "zstd-1.5.7.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5030d51631a09a0d7b3e47f928b6234bd78ce8b897a255fc1146e8cf772a8f4d"}, + {file = "zstd-1.5.7.3-cp36-cp36m-manylinux_2_14_x86_64.whl", hash = "sha256:a8d1ee9faa89b21ff03ae3fe8d969e850c60b8c3f8a1389fa585c10eddaa2bb4"}, + {file = "zstd-1.5.7.3-cp36-cp36m-manylinux_2_4_i686.whl", hash = "sha256:4504ba7a9ddd1919e919f81d3ec541313e6826f1f3cad8e3a7ebe29a3ae5cda6"}, + {file = "zstd-1.5.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aca7d1fef13f412168ac524307586f0d57f96a89bd7e0620b2f60df3b0066c8d"}, + {file = "zstd-1.5.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:12d2925424d02add2f835c7549106151ece9eae262e96aee34af5d84178ba824"}, + {file = "zstd-1.5.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:30512cce4108b26ede395ac521c0997c340bd19f177a1c0260bbffcb64861d30"}, + {file = "zstd-1.5.7.3-cp37-cp37m-manylinux_2_4_i686.whl", hash = "sha256:2e6caf5f3084e6473a6dfd15285c47122ba92f4fb97ecfca855adf415603532a"}, + {file = 
"zstd-1.5.7.3-cp37-cp37m-manylinux_2_4_x86_64.whl", hash = "sha256:927c95b991e81f39b02e42c9b391f2b3569e6dbe29d7fc2dce6ca778475c0934"}, + {file = "zstd-1.5.7.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2174fd7f588b2eb95a402c3d40f4676370eb50292362a0995295084b8f5d521e"}, + {file = "zstd-1.5.7.3-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:3b05817bfdfc395999b6b3c9ea4f7c05e91bceafc3fc819906d5f0445afa4335"}, + {file = "zstd-1.5.7.3-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:c67f0fcf4348343d25ecd35a44d33b6d31814e9ab3ee8676039de809579905a4"}, + {file = "zstd-1.5.7.3-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:40195c0056841aad6553172963adecf31b6ae1fdb9778d657ce9a2493d1791ee"}, + {file = "zstd-1.5.7.3-cp37-cp37m-win32.whl", hash = "sha256:b6ac3ae562758184fc1570399ea9d269163b488dbb0c4a44701e89f61ca6d1d6"}, + {file = "zstd-1.5.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:e9f059d9c9f6f13ae78bfa9778755462b3ea53e4a5185941169422dd97c9fd22"}, + {file = "zstd-1.5.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:99e92b97c97d83e403615c12b644e8616fc7e8a8b4fa0c0558bcb9980baf5c92"}, + {file = "zstd-1.5.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a6b4ff0d5704994eb0d7ba2ea0b25acd749bb78a1c325289a8cba7651f0cbbff"}, + {file = "zstd-1.5.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:edf4b595ab29a980f6f60fa71c64ab029d9ced97fb9c7c9ae555fe1159d8379d"}, + {file = "zstd-1.5.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:3cd48ec1dce8a8a06a3978225b20f28b7764e4191c436277e0abc60539e040da"}, + {file = "zstd-1.5.7.3-cp38-cp38-manylinux_2_4_i686.whl", hash = "sha256:1380ecc510a3885fad326863a7f42b3391560b471aeea60b04f9c1ece439b198"}, + {file = "zstd-1.5.7.3-cp38-cp38-manylinux_2_4_x86_64.whl", hash = "sha256:5fdff5190698e6d48a3facb58085a6c33b62be610f40e80299d975dbc75b32c8"}, + {file = "zstd-1.5.7.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:595d6495e96744fa5c9b78f38e8379f9eebfb97ae4f7ecc2639af4fd51459e07"}, + {file = "zstd-1.5.7.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9bc3d6b7f2dec391b7539a0f43deb07bca1d68867082a07a286c2237f16390fd"}, + {file = "zstd-1.5.7.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b8e62d533281946100c023a1168bd8935db6452bdd0f0b776afe8e80255e74c3"}, + {file = "zstd-1.5.7.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3a5dcc7ddcd56f131bee612b5feadd9b65e3996c0f4c6a485e2b2f20e7a324de"}, + {file = "zstd-1.5.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbb497482dd63abe72a209345dbafa52817bd484c1d08139da080c14b1dadc7b"}, + {file = "zstd-1.5.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a599489d4e7e794981536521ee5dcfa61b0a641996409669b9aba5400b5cff83"}, + {file = "zstd-1.5.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4a7ec28ca27fc347d7325eeb06d66cd2649846d5bfe77b18beed38d1870dd876"}, + {file = "zstd-1.5.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:703481b41e5b3d33cd4e6a0b7116e8bc33a712aba1526d5fcad3e4303dd70fa1"}, + {file = "zstd-1.5.7.3-cp39-cp39-manylinux_2_4_i686.whl", hash = "sha256:61b0707c090d59ba879eac4b475562c5b9c1b375d0419d78fb398f156037f7df"}, + {file = "zstd-1.5.7.3-cp39-cp39-manylinux_2_4_x86_64.whl", hash = "sha256:7090ac97b14dea2969ba1ed427b38efe137efcdf556dc8740d3e035b04cbc8b4"}, + {file = 
"zstd-1.5.7.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:5204bf9f3f2936ee3a28bfe43a57b78f88439c1777197295a0661d6de38caa80"}, + {file = "zstd-1.5.7.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:431d4fecf764c305f29c1b9117d0d2ec5eb5523fc81516f1ee82509cb3b8e088"}, + {file = "zstd-1.5.7.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c2f213a32ab5e90bf165717f05fc1e3c214eeca7b6a33311e2397d89879c2f87"}, + {file = "zstd-1.5.7.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f87d617dac84b571bb74dc9d6905c66906dca982143adbe8e497ba2ce888cca"}, + {file = "zstd-1.5.7.3-cp39-cp39-win32.whl", hash = "sha256:9511957b5b8b5c0d4e737dff3a330a445a44005e09278bb8c799a76eb7f99d90"}, + {file = "zstd-1.5.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:9389848cc8297199b0fe2cd2985e5944f611ed518aa508136065ea0159051904"}, + {file = "zstd-1.5.7.3-cp39-cp39-win_arm64.whl", hash = "sha256:0cdf00f53cd38ce1f9edc79f68727150b9e65f4b33a3e8b59d94d0886cf43dbf"}, + {file = "zstd-1.5.7.3-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:c5ac39836233356d32d0fe3d2f9525373c47c19f75fde68c16cf2293b7648b86"}, + {file = "zstd-1.5.7.3-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:62fe5b560f389fdb40384a1711b7737bd9e27861f248cb89f19fed90a4cf0830"}, + {file = "zstd-1.5.7.3-pp27-pypy_73-manylinux_2_14_x86_64.whl", hash = "sha256:55fb8ac423800811f8b0c896b9617ecc91a1d4da15f66fb42ba162bfa5aa5a2d"}, + {file = "zstd-1.5.7.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2b9ec4d5ba8c170d3fdf21ae5da3c15eaea2beef9c419a5f3274a6f9e03c412a"}, + {file = "zstd-1.5.7.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7ab69fc4d90eeb64b98a567751f8e48373f4bcf301597fca344b8e8342e1d5e"}, + {file = "zstd-1.5.7.3-pp310-pypy310_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da70f0918bf739bc75d7770410c9b94ea0dcb6f02d7ef70598b464bd5fcb193a"}, + {file = "zstd-1.5.7.3-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3dd5c069d0409284f1963b0b6b119f21b1da9e22a503e88933eb0696249d87d3"}, + {file = "zstd-1.5.7.3-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46ca4a075f36f118e2ce07ba07d9ece7aeda193cea6f50b82aaee635df7b5fc2"}, + {file = "zstd-1.5.7.3-pp310-pypy310_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:4a521cb7615fc61bfe9514bea182e224894b5987fc7843b6d6da20a61206ef24"}, + {file = "zstd-1.5.7.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:71ea22c953a164f34eb4b8c2c3b97eaa22da6a75296ea80b3ba4473187f15046"}, + {file = "zstd-1.5.7.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:76c49ea969bc08389ea59155cea7c5dea224522ffc62f443f3c0a915f5fd184d"}, + {file = "zstd-1.5.7.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b1a638ff3dfce8f4cb1203c662fb5606dd99b4a62c5ddc4c406d2d1326bcfdd"}, + {file = "zstd-1.5.7.3-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5e96a5cb100a0edc162935227f2d9784b1031ce4a8a83e96e66eae2673c10143"}, + {file = "zstd-1.5.7.3-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bda0bbf3a9553720cd33f1f85940a259656c7ffba4be717ff82b7f062052188"}, + {file = "zstd-1.5.7.3-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac36e4022422f6e49b3f07bdbb8a964fd348223d3dc9c82ad5398a4f0432a719"}, + {file = 
"zstd-1.5.7.3-pp311-pypy311_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:fa4d760a220541b18ce732a3a2cf7547ea05afc76d05b3b39edebfeb721f6079"}, + {file = "zstd-1.5.7.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a69e60146bf8aaa6a0e6c9a94a7c5f3133d68091e2e5c5a3c5ababf71fd5ec7a"}, + {file = "zstd-1.5.7.3-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:781ec2644a3ce84c1cc19b0e057e1e8ea45260a8871eb6524614be75c9b432b9"}, + {file = "zstd-1.5.7.3-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:ab74f37f2832d4a7c89d877ed9a70b1ef988fc2353678a122427039eb1dc6e36"}, + {file = "zstd-1.5.7.3-pp36-pypy36_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:521a3072fedcce025515d99242e346318d1815789033b7c0108796e151c42deb"}, + {file = "zstd-1.5.7.3-pp36-pypy36_pp73-win32.whl", hash = "sha256:94d404fd56765ff2952053cb2f6f980b88e3384a71af147c3ede9f6c6bea32d6"}, + {file = "zstd-1.5.7.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:33f7e24d626938234c3c33df1988b79846628cf08dfab216bb19f85e7fcad65b"}, + {file = "zstd-1.5.7.3-pp37-pypy37_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:c0c84fd4a87f28b8bed01cbaf128d33dfa209f03df2890dbc8c01e17a109c2d4"}, + {file = "zstd-1.5.7.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0e334e45becf5a4844c8d64593eb358585e1553a7355f2172c865efc639ac051"}, + {file = "zstd-1.5.7.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:15523e289509d7792418edb8c255cc1dacc65cda000428424c988208a682b8be"}, + {file = "zstd-1.5.7.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2924befc3cb1a2310e1c03bd93469a2de8f0703e8805fe1f40367fbc2cece472"}, + {file = "zstd-1.5.7.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:173680156dbe959c80d72a1f15ef2034fd414b9d1ee507df152e416bc37665ef"}, + {file = "zstd-1.5.7.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31d66b73a9861ee61bc6486fb9d1d33eabc86e506e49a210f30a91a241b8e643"}, + {file = "zstd-1.5.7.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a820a67491c1cf7a66698478a28b7d2517b0ae2e2775d834ca4f2624ba859e72"}, + {file = "zstd-1.5.7.3-pp38-pypy38_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:c385f92c37f4275d477388e46af8941580d7eeaad4c524c8f9aa50d016acbc7e"}, + {file = "zstd-1.5.7.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:cecce78a3d639a3c439b1e355791e0f1ddbe8ed63d94f34c7973e92d384e6fc0"}, + {file = "zstd-1.5.7.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e769fc830f5e2079612a27d6540e4147cd8dc8beacfaf73a48152f30a191e979"}, + {file = "zstd-1.5.7.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:37a6750c25b561b05110313fdde4acd51246075a317e1c7a2491c96d2d863282"}, + {file = "zstd-1.5.7.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0e95265e22f07cea6675baab762c9c4577a40d47824b01e0dcdf1a18b46aa041"}, + {file = "zstd-1.5.7.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:878d859a7e1ebc078e0a575c05bcf3b0682b77cabd65bdbdd5e93c137ff1799b"}, + {file = "zstd-1.5.7.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7efcf83189be9d842b9392ffd821b317cbd9447a49c590659abd3311e82c1676"}, + {file = "zstd-1.5.7.3-pp39-pypy39_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:a75dfdbca7dc01e7b35ca9b22e5b9792037b1515857e67b34bd737b213e49432"}, + {file = 
"zstd-1.5.7.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5235dde49df717e5ca58f689e110bf1c4ed578170ab59e77f8a7a5055e4d8c07"}, + {file = "zstd-1.5.7.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f876acad51d2184269ee6fd7e4c4aad9b7a0eca174d7d8db981ea079b57cbaf4"}, + {file = "zstd-1.5.7.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2920e90ef200c7b2cbc73b4271c2271abf6195877b813ede0b5b76289e32fc8e"}, + {file = "zstd-1.5.7.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1f6dd0f2845a9817f0d0920eb0efd2d8a0168b71b8d8c85d2655d9d997f127ba"}, + {file = "zstd-1.5.7.3.tar.gz", hash = "sha256:403e5205f4ac04b92e6b0cda654be2f51de268228a0db0067bc087faacf2f495"}, ] [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.13" -content-hash = "c40ff4d1cb06db047a7a39c35f6c765c259583c203da549146a0ed2e6d17a727" +content-hash = "87a29ee9d43486c2769aedae09b8f37b1f3709b62d39d602962307072e4d71ab" diff --git a/api/pyproject.toml b/api/pyproject.toml index 889a0eb519..711b20092a 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -36,6 +36,8 @@ dependencies = [ "drf-simple-apikey (==2.2.1)", "matplotlib (>=3.10.6,<4.0.0)", "reportlab (>=4.4.4,<5.0.0)", + "neo4j (<6.0.0)", + "cartography @ git+https://github.com/prowler-cloud/cartography@master", "gevent (>=25.9.1,<26.0.0)", "werkzeug (>=3.1.4)", "sqlparse (>=0.5.4)", diff --git a/api/src/backend/api/apps.py b/api/src/backend/api/apps.py index add97cf376..6c7eadb631 100644 --- a/api/src/backend/api/apps.py +++ b/api/src/backend/api/apps.py @@ -1,4 +1,5 @@ import logging +import atexit import os import sys from pathlib import Path @@ -30,6 +31,7 @@ class ApiConfig(AppConfig): def ready(self): from api import schema_extensions # noqa: F401 from api import signals # noqa: F401 + from api.attack_paths import database as graph_database from api.compliance import load_prowler_compliance # Generate required cryptographic keys if not present, but only if: @@ -39,6 +41,10 @@ class ApiConfig(AppConfig): if "manage.py" not in sys.argv or os.environ.get("RUN_MAIN"): self._ensure_crypto_keys() + if not getattr(settings, "TESTING", False): + graph_database.init_driver() + atexit.register(graph_database.close_driver) + load_prowler_compliance() def _ensure_crypto_keys(self): @@ -54,7 +60,7 @@ class ApiConfig(AppConfig): global _keys_initialized # Skip key generation if running tests - if hasattr(settings, "TESTING") and settings.TESTING: + if getattr(settings, "TESTING", False): return # Skip if already initialized in this process diff --git a/api/src/backend/api/attack_paths/__init__.py b/api/src/backend/api/attack_paths/__init__.py new file mode 100644 index 0000000000..2c3ea4c5d8 --- /dev/null +++ b/api/src/backend/api/attack_paths/__init__.py @@ -0,0 +1,13 @@ +from api.attack_paths.query_definitions import ( + AttackPathsQueryDefinition, + AttackPathsQueryParameterDefinition, + get_queries_for_provider, + get_query_by_id, +) + +__all__ = [ + "AttackPathsQueryDefinition", + "AttackPathsQueryParameterDefinition", + "get_queries_for_provider", + "get_query_by_id", +] diff --git a/api/src/backend/api/attack_paths/database.py b/api/src/backend/api/attack_paths/database.py new file mode 100644 index 0000000000..eaa9da3713 --- /dev/null +++ b/api/src/backend/api/attack_paths/database.py @@ -0,0 +1,144 @@ +import logging +import threading + +from contextlib import contextmanager +from typing import Iterator 
+from uuid import UUID + +import neo4j +import neo4j.exceptions + +from django.conf import settings + +from api.attack_paths.retryable_session import RetryableSession + +# Without this Celery goes crazy with Neo4j logging +logging.getLogger("neo4j").setLevel(logging.ERROR) +logging.getLogger("neo4j").propagate = False + +SERVICE_UNAVAILABLE_MAX_RETRIES = 3 + +# Module-level process-wide driver singleton +_driver: neo4j.Driver | None = None +_lock = threading.Lock() + +# Base Neo4j functions + + +def get_uri() -> str: + host = settings.DATABASES["neo4j"]["HOST"] + port = settings.DATABASES["neo4j"]["PORT"] + return f"bolt://{host}:{port}" + + +def init_driver() -> neo4j.Driver: + global _driver + if _driver is not None: + return _driver + + with _lock: + if _driver is None: + uri = get_uri() + config = settings.DATABASES["neo4j"] + + _driver = neo4j.GraphDatabase.driver( + uri, auth=(config["USER"], config["PASSWORD"]) + ) + _driver.verify_connectivity() + + return _driver + + +def get_driver() -> neo4j.Driver: + return init_driver() + + +def close_driver() -> None: # TODO: Use it + global _driver + with _lock: + if _driver is not None: + try: + _driver.close() + + finally: + _driver = None + + +@contextmanager +def get_session(database: str | None = None) -> Iterator[RetryableSession]: + session_wrapper: RetryableSession | None = None + + try: + session_wrapper = RetryableSession( + session_factory=lambda: get_driver().session(database=database), + close_driver=close_driver, # Just to avoid circular imports + max_retries=SERVICE_UNAVAILABLE_MAX_RETRIES, + ) + yield session_wrapper + + except neo4j.exceptions.Neo4jError as exc: + raise GraphDatabaseQueryException(message=exc.message, code=exc.code) + + finally: + if session_wrapper is not None: + session_wrapper.close() + + +def create_database(database: str) -> None: + query = "CREATE DATABASE $database IF NOT EXISTS" + parameters = {"database": database} + + with get_session() as session: + session.run(query, parameters) + + +def drop_database(database: str) -> None: + query = f"DROP DATABASE `{database}` IF EXISTS DESTROY DATA" + + with get_session() as session: + session.run(query) + + +def drop_subgraph(database: str, root_node_label: str, root_node_id: str) -> int: + query = """ + MATCH (a:__ROOT_NODE_LABEL__ {id: $root_node_id}) + CALL apoc.path.subgraphNodes(a, {}) + YIELD node + DETACH DELETE node + RETURN COUNT(node) AS deleted_nodes_count + """.replace("__ROOT_NODE_LABEL__", root_node_label) + parameters = {"root_node_id": root_node_id} + + with get_session(database) as session: + result = session.run(query, parameters) + + try: + return result.single()["deleted_nodes_count"] + + except neo4j.exceptions.ResultConsumedError: + return 0 # As there are no nodes to delete, the result is empty + + +# Neo4j functions related to Prowler + Cartography +DATABASE_NAME_TEMPLATE = "db-{attack_paths_scan_id}" + + +def get_database_name(attack_paths_scan_id: UUID) -> str: + attack_paths_scan_id_str = str(attack_paths_scan_id).lower() + return DATABASE_NAME_TEMPLATE.format(attack_paths_scan_id=attack_paths_scan_id_str) + + +# Exceptions + + +class GraphDatabaseQueryException(Exception): + def __init__(self, message: str, code: str | None = None) -> None: + super().__init__(message) + self.message = message + self.code = code + + def __str__(self) -> str: + if self.code: + return f"{self.code}: {self.message}" + + return self.message diff --git a/api/src/backend/api/attack_paths/query_definitions.py 
b/api/src/backend/api/attack_paths/query_definitions.py new file mode 100644 index 0000000000..bb976bbbff --- /dev/null +++ b/api/src/backend/api/attack_paths/query_definitions.py @@ -0,0 +1,514 @@ +from dataclasses import dataclass, field + + +# Dataclasses for handling API's Attack Paths query definitions and their parameters +@dataclass +class AttackPathsQueryParameterDefinition: + """ + Metadata describing a parameter that must be provided to an Attack Paths query. + """ + + name: str + label: str + data_type: str = "string" + cast: type = str + description: str | None = None + placeholder: str | None = None + + +@dataclass +class AttackPathsQueryDefinition: + """ + Immutable representation of an Attack Paths query. + """ + + id: str + name: str + description: str + provider: str + cypher: str + parameters: list[AttackPathsQueryParameterDefinition] = field(default_factory=list) + + +# Accessor functions for API's Attack Paths query definitions +def get_queries_for_provider(provider: str) -> list[AttackPathsQueryDefinition]: + return _QUERY_DEFINITIONS.get(provider, []) + + +def get_query_by_id(query_id: str) -> AttackPathsQueryDefinition | None: + return _QUERIES_BY_ID.get(query_id) + + +# API's Attack Paths query definitions +_QUERY_DEFINITIONS: dict[str, list[AttackPathsQueryDefinition]] = { + "aws": [ + # Custom query for detecting internet-exposed EC2 instances with sensitive S3 access + AttackPathsQueryDefinition( + id="aws-internet-exposed-ec2-sensitive-s3-access", + name="Identify internet-exposed EC2 instances with sensitive S3 access", + description="Detect EC2 instances with SSH exposed to the internet that can assume higher-privileged roles to read tagged sensitive S3 buckets despite bucket-level public access blocks.", + provider="aws", + cypher=""" + CALL apoc.create.vNode(['Internet'], {id: 'Internet', name: 'Internet'}) + YIELD node AS internet + + MATCH path_s3 = (aws:AWSAccount {id: $provider_uid})--(s3:S3Bucket)--(t:AWSTag) + WHERE toLower(t.key) = toLower($tag_key) AND toLower(t.value) = toLower($tag_value) + + MATCH path_ec2 = (aws)--(ec2:EC2Instance)--(sg:EC2SecurityGroup)--(ipi:IpPermissionInbound) + WHERE ec2.exposed_internet = true + AND ipi.toport = 22 + + MATCH path_role = (r:AWSRole)--(pol:AWSPolicy)--(stmt:AWSPolicyStatement) + WHERE ANY(x IN stmt.resource WHERE x CONTAINS s3.name) + AND ANY(x IN stmt.action WHERE toLower(x) =~ 's3:(listbucket|getobject).*') + + MATCH path_assume_role = (ec2)-[p:STS_ASSUMEROLE_ALLOW*1..9]-(r:AWSRole) + + CALL apoc.create.vRelationship(internet, 'CAN_ACCESS', {}, ec2) + YIELD rel AS can_access + + UNWIND nodes(path_s3) + nodes(path_ec2) + nodes(path_role) + nodes(path_assume_role) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path_s3, path_ec2, path_role, path_assume_role, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access + """, + parameters=[ + AttackPathsQueryParameterDefinition( + name="tag_key", + label="Tag key", + description="Tag key to filter the S3 bucket, e.g. DataClassification.", + placeholder="DataClassification", + ), + AttackPathsQueryParameterDefinition( + name="tag_value", + label="Tag value", + description="Tag value to filter the S3 bucket, e.g. 
Sensitive.", + placeholder="Sensitive", + ), + ], + ), + # Regular Cartography Attack Paths queries + AttackPathsQueryDefinition( + id="aws-rds-instances", + name="Identify provisioned RDS instances", + description="List the selected AWS account alongside the RDS instances it owns.", + provider="aws", + cypher=""" + MATCH path = (aws:AWSAccount {id: $provider_uid})--(rds:RDSInstance) + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-rds-unencrypted-storage", + name="Identify RDS instances without storage encryption", + description="Find RDS instances with storage encryption disabled within the selected account.", + provider="aws", + cypher=""" + MATCH path = (aws:AWSAccount {id: $provider_uid})--(rds:RDSInstance) + WHERE rds.storage_encrypted = false + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-s3-anonymous-access-buckets", + name="Identify S3 buckets with anonymous access", + description="Find S3 buckets that allow anonymous access within the selected account.", + provider="aws", + cypher=""" + MATCH path = (aws:AWSAccount {id: $provider_uid})--(s3:S3Bucket) + WHERE s3.anonymous_access = true + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-iam-statements-allow-all-actions", + name="Identify IAM statements that allow all actions", + description="Find IAM policy statements that allow all actions via '*' within the selected account.", + provider="aws", + cypher=""" + MATCH path = (aws:AWSAccount {id: $provider_uid})--(principal:AWSPrincipal)--(pol:AWSPolicy)--(stmt:AWSPolicyStatement) + WHERE stmt.effect = 'Allow' + AND any(x IN stmt.action WHERE x = '*') + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-iam-statements-allow-delete-policy", + name="Identify IAM statements that allow iam:DeletePolicy", + description="Find IAM policy statements that allow the iam:DeletePolicy action within the selected account.", + provider="aws", + cypher=""" + MATCH path = (aws:AWSAccount {id: $provider_uid})--(principal:AWSPrincipal)--(pol:AWSPolicy)--(stmt:AWSPolicyStatement) + WHERE stmt.effect = 'Allow' + AND any(x IN stmt.action WHERE x = "iam:DeletePolicy") + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-iam-statements-allow-create-actions", + name="Identify IAM statements that allow create actions", + description="Find IAM policy statements that allow actions containing 'create' within the selected account.", + provider="aws", + cypher=""" + MATCH path = (aws:AWSAccount {id: $provider_uid})--(principal:AWSPrincipal)--(pol:AWSPolicy)--(stmt:AWSPolicyStatement) + WHERE stmt.effect = "Allow" + AND any(x IN stmt.action WHERE 
toLower(x) CONTAINS "create") + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-ec2-instances-internet-exposed", + name="Identify internet-exposed EC2 instances", + description="Find EC2 instances flagged as exposed to the internet within the selected account.", + provider="aws", + cypher=""" + CALL apoc.create.vNode(['Internet'], {id: 'Internet', name: 'Internet'}) + YIELD node AS internet + + MATCH path = (aws:AWSAccount {id: $provider_uid})--(ec2:EC2Instance) + WHERE ec2.exposed_internet = true + + CALL apoc.create.vRelationship(internet, 'CAN_ACCESS', {}, ec2) + YIELD rel AS can_access + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-security-groups-open-internet-facing", + name="Identify internet-facing resources with open security groups", + description="Find internet-facing resources associated with security groups that allow inbound access from '0.0.0.0/0'.", + provider="aws", + cypher=""" + CALL apoc.create.vNode(['Internet'], {id: 'Internet', name: 'Internet'}) + YIELD node AS internet + + MATCH path_open = (aws:AWSAccount {id: $provider_uid})-[r0]-(open) + MATCH path_sg = (open)-[r1:MEMBER_OF_EC2_SECURITY_GROUP]-(sg:EC2SecurityGroup) + MATCH path_ip = (sg)-[r2:MEMBER_OF_EC2_SECURITY_GROUP]-(ipi:IpPermissionInbound) + MATCH path_ipi = (ipi)-[r3]-(ir:IpRange) + WHERE ir.range = "0.0.0.0/0" + OPTIONAL MATCH path_dns = (dns:AWSDNSRecord)-[:DNS_POINTS_TO]->(lb) + WHERE open.scheme = 'internet-facing' + + CALL apoc.create.vRelationship(internet, 'CAN_ACCESS', {}, open) + YIELD rel AS can_access + + UNWIND nodes(path_open) + nodes(path_sg) + nodes(path_ip) + nodes(path_ipi) + nodes(path_dns) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path_open, path_sg, path_ip, path_ipi, path_dns, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-classic-elb-internet-exposed", + name="Identify internet-exposed Classic Load Balancers", + description="Find Classic Load Balancers exposed to the internet along with their listeners.", + provider="aws", + cypher=""" + CALL apoc.create.vNode(['Internet'], {id: 'Internet', name: 'Internet'}) + YIELD node AS internet + + MATCH path = (aws:AWSAccount {id: $provider_uid})--(elb:LoadBalancer)--(listener:ELBListener) + WHERE elb.exposed_internet = true + + CALL apoc.create.vRelationship(internet, 'CAN_ACCESS', {}, elb) + YIELD rel AS can_access + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-elbv2-internet-exposed", + name="Identify internet-exposed ELBv2 load balancers", + description="Find ELBv2 load balancers exposed to the internet along with their listeners.", + provider="aws", + cypher=""" + CALL apoc.create.vNode(['Internet'], {id: 'Internet', name: 'Internet'}) + YIELD node AS internet + + MATCH path = (aws:AWSAccount {id: 
$provider_uid})--(elbv2:LoadBalancerV2)--(listener:ELBV2Listener) + WHERE elbv2.exposed_internet = true + + CALL apoc.create.vRelationship(internet, 'CAN_ACCESS', {}, elbv2) + YIELD rel AS can_access + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-public-ip-resource-lookup", + name="Identify resources by public IP address", + description="Given a public IP address, find the related AWS resource and its adjacent node within the selected account.", + provider="aws", + cypher=""" + CALL apoc.create.vNode(['Internet'], {id: 'Internet', name: 'Internet'}) + YIELD node AS internet + + CALL () { + MATCH path = (aws:AWSAccount {id: $provider_uid})-[r]-(x:EC2PrivateIp)-[q]-(y) + WHERE x.public_ip = $ip + RETURN path, x + + UNION MATCH path = (aws:AWSAccount {id: $provider_uid})-[r]-(x:EC2Instance)-[q]-(y) + WHERE x.publicipaddress = $ip + RETURN path, x + + UNION MATCH path = (aws:AWSAccount {id: $provider_uid})-[r]-(x:NetworkInterface)-[q]-(y) + WHERE x.public_ip = $ip + RETURN path, x + + UNION MATCH path = (aws:AWSAccount {id: $provider_uid})-[r]-(x:ElasticIPAddress)-[q]-(y) + WHERE x.public_ip = $ip + RETURN path, x + } + + WITH path, x, internet + + CALL apoc.create.vRelationship(internet, 'CAN_ACCESS', {}, x) + YIELD rel AS can_access + + UNWIND nodes(path) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path, collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr, internet, can_access + """, + parameters=[ + AttackPathsQueryParameterDefinition( + name="ip", + label="IP address", + description="Public IP address, e.g. 192.0.2.0.", + placeholder="192.0.2.0", + ), + ], + ), + # Privilege Escalation Queries (based on pathfinding.cloud research): https://github.com/DataDog/pathfinding.cloud + AttackPathsQueryDefinition( + id="aws-iam-privesc-passrole-ec2", + name="Privilege Escalation: iam:PassRole + ec2:RunInstances", + description="Detect principals who can launch EC2 instances with privileged IAM roles attached. This allows gaining the permissions of the passed role by accessing the EC2 instance metadata service. 
This is a new-passrole escalation path (pathfinding.cloud: ec2-001).", + provider="aws", + cypher=""" + // Create a single shared virtual EC2 instance node + CALL apoc.create.vNode(['EC2Instance'], { + id: 'potential-ec2-passrole', + name: 'New EC2 Instance', + description: 'Attacker-controlled EC2 with privileged role' + }) + YIELD node AS ec2_node + + // Create a single shared virtual escalation outcome node (styled like a finding) + CALL apoc.create.vNode(['PrivilegeEscalation'], { + id: 'effective-administrator-passrole-ec2', + check_title: 'Privilege Escalation', + name: 'Effective Administrator', + status: 'FAIL', + severity: 'critical' + }) + YIELD node AS escalation_outcome + + WITH ec2_node, escalation_outcome + + // Find principals in the account + MATCH path_principal = (aws:AWSAccount {id: $provider_uid})--(principal:AWSPrincipal) + + // Find statements granting iam:PassRole + MATCH path_passrole = (principal)--(passrole_policy:AWSPolicy)--(stmt_passrole:AWSPolicyStatement) + WHERE stmt_passrole.effect = 'Allow' + AND any(action IN stmt_passrole.action WHERE + toLower(action) = 'iam:passrole' + OR toLower(action) = 'iam:*' + OR action = '*' + ) + + // Find statements granting ec2:RunInstances + MATCH path_ec2 = (principal)--(ec2_policy:AWSPolicy)--(stmt_ec2:AWSPolicyStatement) + WHERE stmt_ec2.effect = 'Allow' + AND any(action IN stmt_ec2.action WHERE + toLower(action) = 'ec2:runinstances' + OR toLower(action) = 'ec2:*' + OR action = '*' + ) + + // Find roles that trust EC2 service (can be passed to EC2) + MATCH path_target = (aws)--(target_role:AWSRole) + WHERE target_role.arn CONTAINS $provider_uid + // Check if principal can pass this role + AND any(resource IN stmt_passrole.resource WHERE + resource = '*' + OR target_role.arn CONTAINS resource + OR resource CONTAINS target_role.name + ) + + // Check if target role has elevated permissions (optional, for severity assessment) + OPTIONAL MATCH (target_role)--(role_policy:AWSPolicy)--(role_stmt:AWSPolicyStatement) + WHERE role_stmt.effect = 'Allow' + AND ( + any(action IN role_stmt.action WHERE action = '*') + OR any(action IN role_stmt.action WHERE toLower(action) = 'iam:*') + ) + + CALL apoc.create.vRelationship(principal, 'CAN_LAUNCH', { + via: 'ec2:RunInstances + iam:PassRole' + }, ec2_node) + YIELD rel AS launch_rel + + CALL apoc.create.vRelationship(ec2_node, 'ASSUMES_ROLE', {}, target_role) + YIELD rel AS assumes_rel + + CALL apoc.create.vRelationship(target_role, 'GRANTS_ACCESS', { + reference: 'https://pathfinding.cloud/paths/ec2-001' + }, escalation_outcome) + YIELD rel AS grants_rel + + UNWIND nodes(path_principal) + nodes(path_passrole) + nodes(path_ec2) + nodes(path_target) as n + OPTIONAL MATCH (n)-[pfr]-(pf:ProwlerFinding) + WHERE pf.status = 'FAIL' + + RETURN path_principal, path_passrole, path_ec2, path_target, + ec2_node, escalation_outcome, launch_rel, assumes_rel, grants_rel, + collect(DISTINCT pf) as dpf, collect(DISTINCT pfr) as dpfr + """, + parameters=[], + ), + AttackPathsQueryDefinition( + id="aws-glue-privesc-passrole-dev-endpoint", + name="Privilege Escalation: Glue Dev Endpoint with PassRole", + description="Detect principals that can escalate privileges by passing a role to a Glue development endpoint. 
The attacker creates a dev endpoint with an arbitrary role attached, then accesses those credentials through the endpoint.", + provider="aws", + cypher=""" + CALL apoc.create.vNode(['PrivilegeEscalation'], { + id: 'effective-administrator-glue', + check_title: 'Privilege Escalation', + name: 'Effective Administrator (Glue)', + status: 'FAIL', + severity: 'critical' + }) + YIELD node AS escalation_outcome + + WITH escalation_outcome + + // Find principals in the account + MATCH path_principal = (aws:AWSAccount {id: $provider_uid})--(principal:AWSPrincipal) + + // Principal can assume roles (up to 2 hops) + OPTIONAL MATCH path_assume = (principal)-[:STS_ASSUMEROLE_ALLOW*0..2]->(acting_as:AWSRole) + WITH escalation_outcome, principal, path_principal, path_assume, + CASE WHEN path_assume IS NULL THEN principal ELSE acting_as END AS effective_principal + + // Find iam:PassRole permission + MATCH path_passrole = (effective_principal)--(passrole_policy:AWSPolicy)--(passrole_stmt:AWSPolicyStatement) + WHERE passrole_stmt.effect = 'Allow' + AND any(action IN passrole_stmt.action WHERE toLower(action) = 'iam:passrole' OR action = '*') + + // Find Glue CreateDevEndpoint permission + MATCH (effective_principal)--(glue_policy:AWSPolicy)--(glue_stmt:AWSPolicyStatement) + WHERE glue_stmt.effect = 'Allow' + AND any(action IN glue_stmt.action WHERE toLower(action) = 'glue:createdevendpoint' OR action = '*' OR toLower(action) = 'glue:*') + + // Find target role with elevated permissions + MATCH (aws)--(target_role:AWSRole)--(target_policy:AWSPolicy)--(target_stmt:AWSPolicyStatement) + WHERE target_stmt.effect = 'Allow' + AND ( + any(action IN target_stmt.action WHERE action = '*') + OR any(action IN target_stmt.action WHERE toLower(action) = 'iam:*') + ) + + // Deduplicate before creating virtual nodes + WITH DISTINCT escalation_outcome, aws, principal, effective_principal, target_role + + // Create virtual Glue endpoint node (one per unique principal->target pair) + CALL apoc.create.vNode(['GlueDevEndpoint'], { + name: 'New Dev Endpoint', + description: 'Glue endpoint with target role attached', + id: effective_principal.arn + '->' + target_role.arn + }) + YIELD node AS glue_endpoint + + CALL apoc.create.vRelationship(effective_principal, 'CREATES_ENDPOINT', { + permissions: ['iam:PassRole', 'glue:CreateDevEndpoint'], + technique: 'new-passrole' + }, glue_endpoint) + YIELD rel AS create_rel + + CALL apoc.create.vRelationship(glue_endpoint, 'RUNS_AS', {}, target_role) + YIELD rel AS runs_rel + + CALL apoc.create.vRelationship(target_role, 'GRANTS_ACCESS', { + reference: 'https://pathfinding.cloud/paths/glue-001' + }, escalation_outcome) + YIELD rel AS grants_rel + + // Re-match paths for visualization + MATCH path_principal = (aws)--(principal) + MATCH path_target = (aws)--(target_role) + + RETURN path_principal, path_target, + glue_endpoint, escalation_outcome, create_rel, runs_rel, grants_rel + """, + parameters=[], + ), + ], +} + +_QUERIES_BY_ID: dict[str, AttackPathsQueryDefinition] = { + definition.id: definition + for definitions in _QUERY_DEFINITIONS.values() + for definition in definitions +} diff --git a/api/src/backend/api/attack_paths/retryable_session.py b/api/src/backend/api/attack_paths/retryable_session.py new file mode 100644 index 0000000000..79bf383fff --- /dev/null +++ b/api/src/backend/api/attack_paths/retryable_session.py @@ -0,0 +1,89 @@ +import logging + +from collections.abc import Callable +from typing import Any + +import neo4j +import neo4j.exceptions + +logger = 
logging.getLogger(__name__) + + +class RetryableSession: + """ + Wrapper around `neo4j.Session` that retries `neo4j.exceptions.ServiceUnavailable` errors. + """ + + def __init__( + self, + session_factory: Callable[[], neo4j.Session], + close_driver: Callable[[], None], # Just to avoid circular imports + max_retries: int, + ) -> None: + self._session_factory = session_factory + self._close_driver = close_driver + self._max_retries = max(0, max_retries) + self._session = self._session_factory() + + def close(self) -> None: + if self._session is not None: + self._session.close() + self._session = None + + def __enter__(self) -> "RetryableSession": + return self + + def __exit__( + self, _: Any, __: Any, ___: Any + ) -> None: # Unused args: exc_type, exc, exc_tb + self.close() + + def run(self, *args: Any, **kwargs: Any) -> Any: + return self._call_with_retry("run", *args, **kwargs) + + def write_transaction(self, *args: Any, **kwargs: Any) -> Any: + return self._call_with_retry("write_transaction", *args, **kwargs) + + def read_transaction(self, *args: Any, **kwargs: Any) -> Any: + return self._call_with_retry("read_transaction", *args, **kwargs) + + def execute_write(self, *args: Any, **kwargs: Any) -> Any: + return self._call_with_retry("execute_write", *args, **kwargs) + + def execute_read(self, *args: Any, **kwargs: Any) -> Any: + return self._call_with_retry("execute_read", *args, **kwargs) + + def __getattr__(self, item: str) -> Any: + return getattr(self._session, item) + + def _call_with_retry(self, method_name: str, *args: Any, **kwargs: Any) -> Any: + attempt = 0 + last_exc: neo4j.exceptions.ServiceUnavailable | None = None + + while attempt <= self._max_retries: + try: + method = getattr(self._session, method_name) + return method(*args, **kwargs) + + except ( + neo4j.exceptions.ServiceUnavailable + ) as exc: # pragma: no cover - depends on infra + last_exc = exc + attempt += 1 + + if attempt > self._max_retries: + raise + + logger.warning( + f"Neo4j session {method_name} failed with ServiceUnavailable ({attempt}/{self._max_retries} attempts). Retrying..." 
+ ) + self._refresh_session() + + raise last_exc if last_exc else RuntimeError("Unexpected retry loop exit") + + def _refresh_session(self) -> None: + if self._session is not None: + self._session.close() + + self._close_driver() + self._session = self._session_factory() diff --git a/api/src/backend/api/attack_paths/views_helpers.py b/api/src/backend/api/attack_paths/views_helpers.py new file mode 100644 index 0000000000..7418a0302e --- /dev/null +++ b/api/src/backend/api/attack_paths/views_helpers.py @@ -0,0 +1,143 @@ +import logging + +from typing import Any + +from rest_framework.exceptions import APIException, ValidationError + +from api.attack_paths import database as graph_database, AttackPathsQueryDefinition +from api.models import AttackPathsScan +from config.custom_logging import BackendLogger + +logger = logging.getLogger(BackendLogger.API) + + +def normalize_run_payload(raw_data): + if not isinstance(raw_data, dict): # Let the serializer handle this + return raw_data + + if "data" in raw_data and isinstance(raw_data.get("data"), dict): + data_section = raw_data.get("data") or {} + attributes = data_section.get("attributes") or {} + payload = { + "id": attributes.get("id", data_section.get("id")), + "parameters": attributes.get("parameters"), + } + + # Remove `None` parameters to allow defaults downstream + if payload.get("parameters") is None: + payload.pop("parameters") + return payload + + return raw_data + + +def prepare_query_parameters( + definition: AttackPathsQueryDefinition, + provided_parameters: dict[str, Any], + provider_uid: str, +) -> dict[str, Any]: + parameters = dict(provided_parameters or {}) + expected_names = {parameter.name for parameter in definition.parameters} + provided_names = set(parameters.keys()) + + unexpected = provided_names - expected_names + if unexpected: + raise ValidationError( + {"parameters": f"Unknown parameter(s): {', '.join(sorted(unexpected))}"} + ) + + missing = expected_names - provided_names + if missing: + raise ValidationError( + { + "parameters": f"Missing required parameter(s): {', '.join(sorted(missing))}" + } + ) + + clean_parameters = { + "provider_uid": str(provider_uid), + } + + for definition_parameter in definition.parameters: + raw_value = provided_parameters[definition_parameter.name] + + try: + casted_value = definition_parameter.cast(raw_value) + + except (ValueError, TypeError) as exc: + raise ValidationError( + { + "parameters": ( + f"Invalid value for parameter `{definition_parameter.name}`: {str(exc)}" + ) + } + ) + + clean_parameters[definition_parameter.name] = casted_value + + return clean_parameters + + +def execute_attack_paths_query( + attack_paths_scan: AttackPathsScan, + definition: AttackPathsQueryDefinition, + parameters: dict[str, Any], +) -> dict[str, Any]: + try: + with graph_database.get_session(attack_paths_scan.graph_database) as session: + result = session.run(definition.cypher, parameters) + return _serialize_graph(result.graph()) + + except graph_database.GraphDatabaseQueryException as exc: + logger.error(f"Query failed for Attack Paths query `{definition.id}`: {exc}") + raise APIException( + "Attack Paths query execution failed due to a database error" + ) + + +def _serialize_graph(graph): + nodes = [] + for node in graph.nodes: + nodes.append( + { + "id": node.element_id, + "labels": list(node.labels), + "properties": _serialize_properties(node._properties), + }, + ) + + relationships = [] + for relationship in graph.relationships: + relationships.append( + { + "id": relationship.element_id, + 
"label": relationship.type, + "source": relationship.start_node.element_id, + "target": relationship.end_node.element_id, + "properties": _serialize_properties(relationship._properties), + }, + ) + + return { + "nodes": nodes, + "relationships": relationships, + } + + +def _serialize_properties(properties: dict[str, Any]) -> dict[str, Any]: + """Convert Neo4j property values into JSON-serializable primitives.""" + + def _serialize_value(value: Any) -> Any: + # Neo4j temporal and spatial values expose `to_native` returning Python primitives + if hasattr(value, "to_native") and callable(value.to_native): + return _serialize_value(value.to_native()) + + if isinstance(value, (list, tuple)): + return [_serialize_value(item) for item in value] + + if isinstance(value, dict): + return {key: _serialize_value(val) for key, val in value.items()} + + return value + + return {key: _serialize_value(val) for key, val in properties.items()} diff --git a/api/src/backend/api/filters.py b/api/src/backend/api/filters.py index 008a3cc77d..579888e206 100644 --- a/api/src/backend/api/filters.py +++ b/api/src/backend/api/filters.py @@ -29,6 +29,7 @@ from api.models import ( Finding, Integration, Invitation, + AttackPathsScan, LighthouseProviderConfiguration, LighthouseProviderModels, Membership, @@ -396,6 +397,23 @@ class ScanFilter(ProviderRelationshipFilterSet): } +class AttackPathsScanFilter(ProviderRelationshipFilterSet): + inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date") + completed_at = DateFilter(field_name="completed_at", lookup_expr="date") + started_at = DateFilter(field_name="started_at", lookup_expr="date") + state = ChoiceFilter(choices=StateChoices.choices) + state__in = ChoiceInFilter( + field_name="state", choices=StateChoices.choices, lookup_expr="in" + ) + + class Meta: + model = AttackPathsScan + fields = { + "provider": ["exact", "in"], + "scan": ["exact", "in"], + } + + class TaskFilter(FilterSet): name = CharFilter(field_name="task_runner_task__task_name", lookup_expr="exact") name__icontains = CharFilter( diff --git a/api/src/backend/api/fixtures/dev/8_dev_attack_paths_scans.json b/api/src/backend/api/fixtures/dev/8_dev_attack_paths_scans.json new file mode 100644 index 0000000000..fdf310458a --- /dev/null +++ b/api/src/backend/api/fixtures/dev/8_dev_attack_paths_scans.json @@ -0,0 +1,41 @@ +[ + { + "model": "api.attackpathsscan", + "pk": "a7f0f6de-6f8e-4b3a-8cbe-3f6dd9012345", + "fields": { + "tenant": "12646005-9067-4d2a-a098-8bb378604362", + "provider": "b85601a8-4b45-4194-8135-03fb980ef428", + "scan": "01920573-aa9c-73c9-bcda-f2e35c9b19d2", + "state": "completed", + "progress": 100, + "update_tag": 1693586667, + "graph_database": "db-a7f0f6de-6f8e-4b3a-8cbe-3f6dd9012345", + "is_graph_database_deleted": false, + "task": null, + "inserted_at": "2024-09-01T17:24:37Z", + "updated_at": "2024-09-01T17:44:37Z", + "started_at": "2024-09-01T17:34:37Z", + "completed_at": "2024-09-01T17:44:37Z", + "duration": 269, + "ingestion_exceptions": {} + } + }, + { + "model": "api.attackpathsscan", + "pk": "4a2fb2af-8a60-4d7d-9cae-4ca65e098765", + "fields": { + "tenant": "12646005-9067-4d2a-a098-8bb378604362", + "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555", + "scan": "01929f3b-ed2e-7623-ad63-7c37cd37828f", + "state": "executing", + "progress": 48, + "update_tag": 1697625000, + "graph_database": "db-4a2fb2af-8a60-4d7d-9cae-4ca65e098765", + "is_graph_database_deleted": false, + "task": null, + "inserted_at": "2024-10-18T10:55:57Z", + "updated_at": "2024-10-18T10:56:15Z", + 
"started_at": "2024-10-18T10:56:05Z" + } + } +] diff --git a/api/src/backend/api/migrations/0070_attack_paths_scan.py b/api/src/backend/api/migrations/0070_attack_paths_scan.py new file mode 100644 index 0000000000..3e63d3353b --- /dev/null +++ b/api/src/backend/api/migrations/0070_attack_paths_scan.py @@ -0,0 +1,154 @@ +# Generated by Django 5.1.13 on 2025-11-06 16:20 + +import django.db.models.deletion + +from django.db import migrations, models +from uuid6 import uuid7 + +import api.rls + + +class Migration(migrations.Migration): + dependencies = [ + ("api", "0069_resource_resource_group"), + ] + + operations = [ + migrations.CreateModel( + name="AttackPathsScan", + fields=[ + ( + "id", + models.UUIDField( + default=uuid7, + editable=False, + primary_key=True, + serialize=False, + ), + ), + ("inserted_at", models.DateTimeField(auto_now_add=True)), + ("updated_at", models.DateTimeField(auto_now=True)), + ( + "state", + api.db_utils.StateEnumField( + choices=[ + ("available", "Available"), + ("scheduled", "Scheduled"), + ("executing", "Executing"), + ("completed", "Completed"), + ("failed", "Failed"), + ("cancelled", "Cancelled"), + ], + default="available", + ), + ), + ("progress", models.IntegerField(default=0)), + ("started_at", models.DateTimeField(blank=True, null=True)), + ("completed_at", models.DateTimeField(blank=True, null=True)), + ( + "duration", + models.IntegerField( + blank=True, help_text="Duration in seconds", null=True + ), + ), + ( + "update_tag", + models.BigIntegerField( + blank=True, + help_text="Cartography update tag (epoch)", + null=True, + ), + ), + ( + "graph_database", + models.CharField(blank=True, max_length=63, null=True), + ), + ( + "is_graph_database_deleted", + models.BooleanField(default=False), + ), + ( + "ingestion_exceptions", + models.JSONField(blank=True, default=dict, null=True), + ), + ( + "provider", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="attack_paths_scans", + related_query_name="attack_paths_scan", + to="api.provider", + ), + ), + ( + "scan", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="attack_paths_scans", + related_query_name="attack_paths_scan", + to="api.scan", + ), + ), + ( + "task", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="attack_paths_scans", + related_query_name="attack_paths_scan", + to="api.task", + ), + ), + ( + "tenant", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="api.tenant" + ), + ), + ], + options={ + "db_table": "attack_paths_scans", + "abstract": False, + "indexes": [ + models.Index( + fields=["tenant_id", "provider_id", "-inserted_at"], + name="aps_prov_ins_desc_idx", + ), + models.Index( + fields=["tenant_id", "state", "-inserted_at"], + name="aps_state_ins_desc_idx", + ), + models.Index( + fields=["tenant_id", "scan_id"], + name="aps_scan_lookup_idx", + ), + models.Index( + fields=["tenant_id", "provider_id"], + name="aps_active_graph_idx", + include=["graph_database", "id"], + condition=models.Q(("is_graph_database_deleted", False)), + ), + models.Index( + fields=["tenant_id", "provider_id", "-completed_at"], + name="aps_completed_graph_idx", + include=["graph_database", "id"], + condition=models.Q( + ("state", "completed"), + ("is_graph_database_deleted", False), + ), + ), + ], + }, + ), + migrations.AddConstraint( + model_name="attackpathsscan", + constraint=api.rls.RowLevelSecurityConstraint( + "tenant_id", + 
name="rls_on_attackpathsscan", + statements=["SELECT", "INSERT", "UPDATE", "DELETE"], + ), + ), + ] diff --git a/api/src/backend/api/models.py b/api/src/backend/api/models.py index 68bfa78561..4f7eb7ba6d 100644 --- a/api/src/backend/api/models.py +++ b/api/src/backend/api/models.py @@ -626,6 +626,101 @@ class Scan(RowLevelSecurityProtectedModel): resource_name = "scans" +class AttackPathsScan(RowLevelSecurityProtectedModel): + objects = ActiveProviderManager() + all_objects = models.Manager() + + id = models.UUIDField(primary_key=True, default=uuid7, editable=False) + inserted_at = models.DateTimeField(auto_now_add=True, editable=False) + updated_at = models.DateTimeField(auto_now=True, editable=False) + + state = StateEnumField(choices=StateChoices.choices, default=StateChoices.AVAILABLE) + progress = models.IntegerField(default=0) + + # Timing + started_at = models.DateTimeField(null=True, blank=True) + completed_at = models.DateTimeField(null=True, blank=True) + duration = models.IntegerField( + null=True, blank=True, help_text="Duration in seconds" + ) + + # Relationship to the provider and optional prowler Scan and celery Task + provider = models.ForeignKey( + "Provider", + on_delete=models.CASCADE, + related_name="attack_paths_scans", + related_query_name="attack_paths_scan", + ) + scan = models.ForeignKey( + "Scan", + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="attack_paths_scans", + related_query_name="attack_paths_scan", + ) + task = models.ForeignKey( + "Task", + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="attack_paths_scans", + related_query_name="attack_paths_scan", + ) + + # Cartography specific metadata + update_tag = models.BigIntegerField( + null=True, blank=True, help_text="Cartography update tag (epoch)" + ) + graph_database = models.CharField(max_length=63, null=True, blank=True) + is_graph_database_deleted = models.BooleanField(default=False) + ingestion_exceptions = models.JSONField(default=dict, null=True, blank=True) + + class Meta(RowLevelSecurityProtectedModel.Meta): + db_table = "attack_paths_scans" + + constraints = [ + RowLevelSecurityConstraint( + field="tenant_id", + name="rls_on_%(class)s", + statements=["SELECT", "INSERT", "UPDATE", "DELETE"], + ), + ] + + indexes = [ + models.Index( + fields=["tenant_id", "provider_id", "-inserted_at"], + name="aps_prov_ins_desc_idx", + ), + models.Index( + fields=["tenant_id", "state", "-inserted_at"], + name="aps_state_ins_desc_idx", + ), + models.Index( + fields=["tenant_id", "scan_id"], + name="aps_scan_lookup_idx", + ), + models.Index( + fields=["tenant_id", "provider_id"], + name="aps_active_graph_idx", + include=["graph_database", "id"], + condition=Q(is_graph_database_deleted=False), + ), + models.Index( + fields=["tenant_id", "provider_id", "-completed_at"], + name="aps_completed_graph_idx", + include=["graph_database", "id"], + condition=Q( + state=StateChoices.COMPLETED, + is_graph_database_deleted=False, + ), + ), + ] + + class JSONAPIMeta: + resource_name = "attack-paths-scans" + + class ResourceTag(RowLevelSecurityProtectedModel): id = models.UUIDField(primary_key=True, default=uuid4, editable=False) inserted_at = models.DateTimeField(auto_now_add=True, editable=False) diff --git a/api/src/backend/api/specs/v1.yaml b/api/src/backend/api/specs/v1.yaml index 5a55602d49..0e68d6b364 100644 --- a/api/src/backend/api/specs/v1.yaml +++ b/api/src/backend/api/specs/v1.yaml @@ -280,6 +280,439 @@ paths: schema: $ref: '#/components/schemas/OpenApiResponseResponse' 
description: API key was successfully revoked + /api/v1/attack-paths-scans: + get: + operationId: attack_paths_scans_list + description: Retrieve Attack Paths scans for the tenant with support for filtering, + ordering, and pagination. + summary: List Attack Paths scans + parameters: + - in: query + name: fields[attack-paths-scans] + schema: + type: array + items: + type: string + enum: + - state + - progress + - provider + - provider_alias + - provider_type + - provider_uid + - scan + - task + - inserted_at + - started_at + - completed_at + - duration + description: endpoint return only specific fields in the response on a per-type + basis by including a fields[TYPE] query parameter. + explode: false + - in: query + name: filter[completed_at] + schema: + type: string + format: date + - in: query + name: filter[inserted_at] + schema: + type: string + format: date + - in: query + name: filter[provider] + schema: + type: string + format: uuid + - in: query + name: filter[provider__in] + schema: + type: array + items: + type: string + format: uuid + description: Multiple values may be separated by commas. + explode: false + style: form + - in: query + name: filter[provider_alias] + schema: + type: string + - in: query + name: filter[provider_alias__icontains] + schema: + type: string + - in: query + name: filter[provider_alias__in] + schema: + type: array + items: + type: string + description: Multiple values may be separated by commas. + explode: false + style: form + - in: query + name: filter[provider_type] + schema: + type: string + x-spec-enum-id: 684bf4173d2b754f + enum: + - alibabacloud + - aws + - azure + - gcp + - github + - iac + - kubernetes + - m365 + - mongodbatlas + - oraclecloud + description: |- + * `aws` - AWS + * `azure` - Azure + * `gcp` - GCP + * `kubernetes` - Kubernetes + * `m365` - M365 + * `github` - GitHub + * `mongodbatlas` - MongoDB Atlas + * `iac` - IaC + * `oraclecloud` - Oracle Cloud Infrastructure + * `alibabacloud` - Alibaba Cloud + - in: query + name: filter[provider_type__in] + schema: + type: array + items: + type: string + x-spec-enum-id: 684bf4173d2b754f + enum: + - alibabacloud + - aws + - azure + - gcp + - github + - iac + - kubernetes + - m365 + - mongodbatlas + - oraclecloud + description: |- + Multiple values may be separated by commas. + + * `aws` - AWS + * `azure` - Azure + * `gcp` - GCP + * `kubernetes` - Kubernetes + * `m365` - M365 + * `github` - GitHub + * `mongodbatlas` - MongoDB Atlas + * `iac` - IaC + * `oraclecloud` - Oracle Cloud Infrastructure + * `alibabacloud` - Alibaba Cloud + explode: false + style: form + - in: query + name: filter[provider_uid] + schema: + type: string + - in: query + name: filter[provider_uid__icontains] + schema: + type: string + - in: query + name: filter[provider_uid__in] + schema: + type: array + items: + type: string + description: Multiple values may be separated by commas. + explode: false + style: form + - in: query + name: filter[scan] + schema: + type: string + format: uuid + - in: query + name: filter[scan__in] + schema: + type: array + items: + type: string + format: uuid + description: Multiple values may be separated by commas. + explode: false + style: form + - name: filter[search] + required: false + in: query + description: A search term. 
+ schema: + type: string + - in: query + name: filter[started_at] + schema: + type: string + format: date + - in: query + name: filter[state] + schema: + type: string + x-spec-enum-id: d38ba07264e1ed34 + enum: + - available + - cancelled + - completed + - executing + - failed + - scheduled + description: |- + * `available` - Available + * `scheduled` - Scheduled + * `executing` - Executing + * `completed` - Completed + * `failed` - Failed + * `cancelled` - Cancelled + - in: query + name: filter[state__in] + schema: + type: array + items: + type: string + x-spec-enum-id: d38ba07264e1ed34 + enum: + - available + - cancelled + - completed + - executing + - failed + - scheduled + description: |- + Multiple values may be separated by commas. + + * `available` - Available + * `scheduled` - Scheduled + * `executing` - Executing + * `completed` - Completed + * `failed` - Failed + * `cancelled` - Cancelled + explode: false + style: form + - in: query + name: include + schema: + type: array + items: + type: string + enum: + - provider + - scan + - task + description: include query parameter to allow the client to customize which + related resources should be returned. + explode: false + - name: page[number] + required: false + in: query + description: A page number within the paginated result set. + schema: + type: integer + - name: page[size] + required: false + in: query + description: Number of results to return per page. + schema: + type: integer + - name: sort + required: false + in: query + description: '[list of fields to sort by](https://jsonapi.org/format/#fetching-sorting)' + schema: + type: array + items: + type: string + enum: + - inserted_at + - -inserted_at + - started_at + - -started_at + explode: false + tags: + - Attack Paths + security: + - JWT or API Key: [] + responses: + '200': + content: + application/vnd.api+json: + schema: + $ref: '#/components/schemas/PaginatedAttackPathsScanList' + description: '' + /api/v1/attack-paths-scans/{id}: + get: + operationId: attack_paths_scans_retrieve + description: Fetch full details for a specific Attack Paths scan. + summary: Retrieve Attack Paths scan details + parameters: + - in: query + name: fields[attack-paths-scans] + schema: + type: array + items: + type: string + enum: + - state + - progress + - provider + - provider_alias + - provider_type + - provider_uid + - scan + - task + - inserted_at + - started_at + - completed_at + - duration + description: endpoint return only specific fields in the response on a per-type + basis by including a fields[TYPE] query parameter. + explode: false + - in: path + name: id + schema: + type: string + format: uuid + description: A UUID string identifying this attack paths scan. + required: true + - in: query + name: include + schema: + type: array + items: + type: string + enum: + - provider + - scan + - task + description: include query parameter to allow the client to customize which + related resources should be returned. + explode: false + tags: + - Attack Paths + security: + - JWT or API Key: [] + responses: + '200': + content: + application/vnd.api+json: + schema: + $ref: '#/components/schemas/AttackPathsScanResponse' + description: '' + /api/v1/attack-paths-scans/{id}/queries: + get: + operationId: attack_paths_scans_queries_retrieve + description: Retrieve the catalog of Attack Paths queries available for this + Attack Paths scan. 
+ summary: List attack paths queries + parameters: + - in: query + name: fields[attack-paths-scans] + schema: + type: array + items: + type: string + enum: + - state + - progress + - provider + - provider_alias + - provider_type + - provider_uid + - scan + - task + - inserted_at + - started_at + - completed_at + - duration + description: endpoint return only specific fields in the response on a per-type + basis by including a fields[TYPE] query parameter. + explode: false + - in: path + name: id + schema: + type: string + format: uuid + description: A UUID string identifying this attack paths scan. + required: true + - in: query + name: include + schema: + type: array + items: + type: string + enum: + - provider + - scan + - task + description: include query parameter to allow the client to customize which + related resources should be returned. + explode: false + tags: + - Attack Paths + security: + - JWT or API Key: [] + responses: + '200': + content: + application/vnd.api+json: + schema: + $ref: '#/components/schemas/PaginatedAttackPathsQueryList' + description: '' + '404': + description: No queries found for the selected provider + /api/v1/attack-paths-scans/{id}/queries/run: + post: + operationId: attack_paths_scans_queries_run_create + description: Execute the selected Attack Paths query against the Attack Paths + graph and return the resulting subgraph. + summary: Execute an Attack Paths query + parameters: + - in: path + name: id + schema: + type: string + format: uuid + description: A UUID string identifying this attack paths scan. + required: true + tags: + - Attack Paths + requestBody: + content: + application/vnd.api+json: + schema: + $ref: '#/components/schemas/AttackPathsQueryRunRequestRequest' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/AttackPathsQueryRunRequestRequest' + multipart/form-data: + schema: + $ref: '#/components/schemas/AttackPathsQueryRunRequestRequest' + required: true + security: + - JWT or API Key: [] + responses: + '200': + content: + application/vnd.api+json: + schema: + $ref: '#/components/schemas/OpenApiResponseResponse' + description: '' + '400': + description: Bad request (e.g., Unknown Attack Paths query for the selected + provider) + '404': + description: No attack paths found for the given query and parameters + '500': + description: Attack Paths query execution failed due to a database error /api/v1/compliance-overviews: get: operationId: compliance_overviews_list @@ -11725,6 +12158,349 @@ paths: description: '' components: schemas: + AttackPathsNode: + type: object + required: + - type + additionalProperties: false + properties: + type: + type: string + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common attributes + and relationships. + enum: + - attack-paths-query-result-nodes + attributes: + type: object + properties: + id: + type: string + labels: + type: array + items: + type: string + properties: + type: object + additionalProperties: {} + required: + - id + - labels + - properties + AttackPathsQuery: + type: object + required: + - type + - id + additionalProperties: false + properties: + type: + type: string + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common attributes + and relationships. 
+ enum: + - attack-paths-queries + id: {} + attributes: + type: object + properties: + id: + type: string + name: + type: string + description: + type: string + provider: + type: string + parameters: + type: array + items: + $ref: '#/components/schemas/AttackPathsQueryParameter' + required: + - id + - name + - description + - provider + - parameters + AttackPathsQueryParameter: + type: object + required: + - type + - id + additionalProperties: false + properties: + type: + type: string + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common attributes + and relationships. + enum: + - attack-paths-query-parameters + id: {} + attributes: + type: object + properties: + name: + type: string + label: + type: string + data_type: + type: string + default: string + description: + type: string + nullable: true + placeholder: + type: string + nullable: true + required: + - name + - label + AttackPathsQueryResult: + type: object + required: + - type + additionalProperties: false + properties: + type: + type: string + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common attributes + and relationships. + enum: + - attack-paths-query-results + attributes: + type: object + properties: + nodes: + type: array + items: + $ref: '#/components/schemas/AttackPathsNode' + relationships: + type: array + items: + $ref: '#/components/schemas/AttackPathsRelationship' + required: + - nodes + - relationships + AttackPathsQueryRunRequestRequest: + type: object + properties: + data: + type: object + required: + - type + additionalProperties: false + properties: + type: + type: string + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common attributes + and relationships. + enum: + - attack-paths-query-run-requests + attributes: + type: object + properties: + id: + type: string + minLength: 1 + parameters: + type: object + additionalProperties: {} + required: + - id + required: + - data + AttackPathsRelationship: + type: object + required: + - type + additionalProperties: false + properties: + type: + type: string + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common attributes + and relationships. + enum: + - attack-paths-query-result-relationships + attributes: + type: object + properties: + id: + type: string + label: + type: string + source: + type: string + target: + type: string + properties: + type: object + additionalProperties: {} + required: + - id + - label + - source + - target + - properties + AttackPathsScan: + type: object + required: + - type + - id + additionalProperties: false + properties: + type: + type: string + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common attributes + and relationships. 
+ enum: + - attack-paths-scans + id: + type: string + format: uuid + attributes: + type: object + properties: + state: + enum: + - available + - scheduled + - executing + - completed + - failed + - cancelled + type: string + description: |- + * `available` - Available + * `scheduled` - Scheduled + * `executing` - Executing + * `completed` - Completed + * `failed` - Failed + * `cancelled` - Cancelled + x-spec-enum-id: d38ba07264e1ed34 + readOnly: true + progress: + type: integer + maximum: 2147483647 + minimum: -2147483648 + provider_alias: + type: string + readOnly: true + provider_type: + type: string + readOnly: true + provider_uid: + type: string + readOnly: true + inserted_at: + type: string + format: date-time + readOnly: true + started_at: + type: string + format: date-time + nullable: true + completed_at: + type: string + format: date-time + nullable: true + duration: + type: integer + maximum: 2147483647 + minimum: -2147483648 + nullable: true + description: Duration in seconds + relationships: + type: object + properties: + provider: + type: object + properties: + data: + type: object + properties: + id: + type: string + format: uuid + type: + type: string + enum: + - providers + title: Resource Type Name + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common + attributes and relationships. + required: + - id + - type + required: + - data + description: The identifier of the related object. + title: Resource Identifier + scan: + type: object + properties: + data: + type: object + properties: + id: + type: string + format: uuid + type: + type: string + enum: + - scans + title: Resource Type Name + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common + attributes and relationships. + required: + - id + - type + required: + - data + description: The identifier of the related object. + title: Resource Identifier + nullable: true + task: + type: object + properties: + data: + type: object + properties: + id: + type: string + format: uuid + type: + type: string + enum: + - tasks + title: Resource Type Name + description: The [type](https://jsonapi.org/format/#document-resource-object-identification) + member is used to describe resource objects that share common + attributes and relationships. + required: + - id + - type + required: + - data + description: The identifier of the related object. + title: Resource Identifier + nullable: true + required: + - provider + AttackPathsScanResponse: + type: object + properties: + data: + $ref: '#/components/schemas/AttackPathsScan' + required: + - data AttackSurfaceOverview: type: object required: @@ -14920,6 +15696,24 @@ components: $ref: '#/components/schemas/OverviewSeverity' required: - data + PaginatedAttackPathsQueryList: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/AttackPathsQuery' + required: + - data + PaginatedAttackPathsScanList: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/AttackPathsScan' + required: + - data PaginatedAttackSurfaceOverviewList: type: object properties: @@ -21269,6 +22063,8 @@ tags: revoking tasks that have not started. - name: Scan description: Endpoints for triggering manual scans and viewing scan results. +- name: Attack Paths + description: Endpoints for Attack Paths scan status and executing Attack Paths queries. 
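For orientation, the endpoints and schemas above compose into a three-step flow: pick a completed Attack Paths scan, list its query catalog, then run one query and read back the subgraph. A minimal, illustrative client sketch — not part of the generated spec; the base URL and token are placeholders, and it assumes at least one completed scan exists and that the chosen query needs no parameters:

import json

import requests

API_BASE = "http://localhost:8080/api/v1"  # placeholder deployment URL
HEADERS = {
    "Authorization": "Bearer <token>",  # placeholder credentials
    "Accept": "application/vnd.api+json",
    "Content-Type": "application/vnd.api+json",
}

# 1. Pick the latest completed Attack Paths scan (the list endpoint
#    returns one entry per provider).
scans = requests.get(
    f"{API_BASE}/attack-paths-scans",
    params={"filter[state]": "completed"},
    headers=HEADERS,
).json()["data"]
scan_id = scans[0]["id"]

# 2. List the query catalog available for that scan's provider.
queries = requests.get(
    f"{API_BASE}/attack-paths-scans/{scan_id}/queries",
    headers=HEADERS,
).json()["data"]

# 3. Run a query. A 200 response carries an attack-paths-query-results
#    resource whose attributes hold the subgraph; 404 means no paths matched.
body = {
    "data": {
        "type": "attack-paths-query-run-requests",
        "attributes": {"id": queries[0]["id"], "parameters": {}},
    }
}
graph = requests.post(
    f"{API_BASE}/attack-paths-scans/{scan_id}/queries/run",
    data=json.dumps(body),
    headers=HEADERS,
).json()["data"]["attributes"]
print(len(graph["nodes"]), "nodes,", len(graph["relationships"]), "relationships")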
- name: Schedule description: Endpoints for managing scan schedules, allowing configuration of automated scans with different scheduling options. diff --git a/api/src/backend/api/tests/test_attack_paths.py b/api/src/backend/api/tests/test_attack_paths.py new file mode 100644 index 0000000000..2c4e1484f8 --- /dev/null +++ b/api/src/backend/api/tests/test_attack_paths.py @@ -0,0 +1,172 @@ +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import pytest + +from rest_framework.exceptions import APIException, ValidationError + +from api.attack_paths import database as graph_database +from api.attack_paths import views_helpers + + +def test_normalize_run_payload_extracts_attributes_section(): + payload = { + "data": { + "id": "ignored", + "attributes": { + "id": "aws-rds", + "parameters": {"ip": "192.0.2.0"}, + }, + } + } + + result = views_helpers.normalize_run_payload(payload) + + assert result == {"id": "aws-rds", "parameters": {"ip": "192.0.2.0"}} + + +def test_normalize_run_payload_passthrough_for_non_dict(): + sentinel = "not-a-dict" + assert views_helpers.normalize_run_payload(sentinel) is sentinel + + +def test_prepare_query_parameters_includes_provider_and_casts( + attack_paths_query_definition_factory, +): + definition = attack_paths_query_definition_factory(cast_type=int) + result = views_helpers.prepare_query_parameters( + definition, + {"limit": "5"}, + provider_uid="123456789012", + ) + + assert result["provider_uid"] == "123456789012" + assert result["limit"] == 5 + + +@pytest.mark.parametrize( + "provided,expected_message", + [ + ({}, "Missing required parameter"), + ({"limit": 10, "extra": True}, "Unknown parameter"), + ], +) +def test_prepare_query_parameters_validates_names( + attack_paths_query_definition_factory, provided, expected_message +): + definition = attack_paths_query_definition_factory() + + with pytest.raises(ValidationError) as exc: + views_helpers.prepare_query_parameters(definition, provided, provider_uid="1") + + assert expected_message in str(exc.value) + + +def test_prepare_query_parameters_validates_cast( + attack_paths_query_definition_factory, +): + definition = attack_paths_query_definition_factory(cast_type=int) + + with pytest.raises(ValidationError) as exc: + views_helpers.prepare_query_parameters( + definition, + {"limit": "not-an-int"}, + provider_uid="1", + ) + + assert "Invalid value" in str(exc.value) + + +def test_execute_attack_paths_query_serializes_graph( + attack_paths_query_definition_factory, attack_paths_graph_stub_classes +): + definition = attack_paths_query_definition_factory( + id="aws-rds", + name="RDS", + description="", + cypher="MATCH (n) RETURN n", + parameters=[], + ) + parameters = {"provider_uid": "123"} + attack_paths_scan = SimpleNamespace(graph_database="tenant-db") + + node = attack_paths_graph_stub_classes.Node( + element_id="node-1", + labels=["AWSAccount"], + properties={ + "name": "account", + "complex": { + "items": [ + attack_paths_graph_stub_classes.NativeValue("value"), + {"nested": 1}, + ] + }, + }, + ) + relationship = attack_paths_graph_stub_classes.Relationship( + element_id="rel-1", + rel_type="OWNS", + start_node=node, + end_node=attack_paths_graph_stub_classes.Node("node-2", ["RDSInstance"], {}), + properties={"weight": 1}, + ) + graph = SimpleNamespace(nodes=[node], relationships=[relationship]) + + run_result = MagicMock() + run_result.graph.return_value = graph + + session = MagicMock() + session.run.return_value = run_result + + session_ctx = MagicMock() + 
session_ctx.__enter__.return_value = session + session_ctx.__exit__.return_value = False + + with patch( + "api.attack_paths.views_helpers.graph_database.get_session", + return_value=session_ctx, + ) as mock_get_session: + result = views_helpers.execute_attack_paths_query( + attack_paths_scan, definition, parameters + ) + + mock_get_session.assert_called_once_with("tenant-db") + session.run.assert_called_once_with(definition.cypher, parameters) + assert result["nodes"][0]["id"] == "node-1" + assert result["nodes"][0]["properties"]["complex"]["items"][0] == "value" + assert result["relationships"][0]["label"] == "OWNS" + + +def test_execute_attack_paths_query_wraps_graph_errors( + attack_paths_query_definition_factory, +): + definition = attack_paths_query_definition_factory( + id="aws-rds", + name="RDS", + description="", + cypher="MATCH (n) RETURN n", + parameters=[], + ) + attack_paths_scan = SimpleNamespace(graph_database="tenant-db") + parameters = {"provider_uid": "123"} + + class ExplodingContext: + def __enter__(self): + raise graph_database.GraphDatabaseQueryException("boom") + + def __exit__(self, exc_type, exc, tb): + return False + + with ( + patch( + "api.attack_paths.views_helpers.graph_database.get_session", + return_value=ExplodingContext(), + ), + patch("api.attack_paths.views_helpers.logger") as mock_logger, + ): + with pytest.raises(APIException): + views_helpers.execute_attack_paths_query( + attack_paths_scan, definition, parameters + ) + + mock_logger.error.assert_called_once() diff --git a/api/src/backend/api/tests/test_views.py b/api/src/backend/api/tests/test_views.py index f58fe138a9..a05345aaac 100644 --- a/api/src/backend/api/tests/test_views.py +++ b/api/src/backend/api/tests/test_views.py @@ -32,6 +32,10 @@ from django_celery_results.models import TaskResult from rest_framework import status from rest_framework.response import Response +from api.attack_paths import ( + AttackPathsQueryDefinition, + AttackPathsQueryParameterDefinition, +) from api.compliance import get_compliance_frameworks from api.db_router import MainRouter from api.models import ( @@ -3602,6 +3606,420 @@ class TestTaskViewSet: assert response.status_code == status.HTTP_400_BAD_REQUEST +@pytest.mark.django_db +class TestAttackPathsScanViewSet: + @staticmethod + def _run_payload(query_id="aws-rds", parameters=None): + return { + "data": { + "type": "attack-paths-query-run-requests", + "attributes": { + "id": query_id, + "parameters": parameters or {}, + }, + } + } + + def test_attack_paths_scans_list_returns_latest_entry_per_provider( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + other_provider = providers_fixture[1] + + older_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + state=StateChoices.AVAILABLE, + progress=10, + ) + latest_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + state=StateChoices.COMPLETED, + progress=95, + ) + other_provider_scan = create_attack_paths_scan( + other_provider, + scan=scans_fixture[2], + state=StateChoices.FAILED, + progress=50, + ) + + response = authenticated_client.get(reverse("attack-paths-scans-list")) + + assert response.status_code == status.HTTP_200_OK + data = response.json()["data"] + ids = {item["id"] for item in data} + assert ids == {str(latest_scan.id), str(other_provider_scan.id)} + assert str(older_scan.id) not in ids + + provider_entry = next( + item + for item in data + if 
item["relationships"]["provider"]["data"]["id"] == str(provider.id) + ) + + first_attributes = provider_entry["attributes"] + assert first_attributes["provider_alias"] == provider.alias + assert first_attributes["provider_type"] == provider.provider + assert first_attributes["provider_uid"] == provider.uid + + def test_attack_paths_scans_list_respects_provider_group_visibility( + self, + authenticated_client_no_permissions_rbac, + providers_fixture, + create_attack_paths_scan, + ): + client = authenticated_client_no_permissions_rbac + limited_user = client.user + membership = Membership.objects.filter(user=limited_user).first() + tenant = membership.tenant + + allowed_provider = providers_fixture[0] + denied_provider = providers_fixture[1] + + allowed_scan = create_attack_paths_scan(allowed_provider) + create_attack_paths_scan(denied_provider) + + provider_group = ProviderGroup.objects.create( + name="limited-group", + tenant_id=tenant.id, + ) + ProviderGroupMembership.objects.create( + tenant_id=tenant.id, + provider_group=provider_group, + provider=allowed_provider, + ) + limited_role = limited_user.roles.first() + RoleProviderGroupRelationship.objects.create( + tenant_id=tenant.id, + role=limited_role, + provider_group=provider_group, + ) + + response = client.get(reverse("attack-paths-scans-list")) + + assert response.status_code == status.HTTP_200_OK + data = response.json()["data"] + assert len(data) == 1 + assert data[0]["id"] == str(allowed_scan.id) + + def test_attack_paths_scan_retrieve( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + state=StateChoices.COMPLETED, + progress=80, + ) + + response = authenticated_client.get( + reverse("attack-paths-scans-detail", kwargs={"pk": attack_paths_scan.id}) + ) + + assert response.status_code == status.HTTP_200_OK + data = response.json()["data"] + assert data["id"] == str(attack_paths_scan.id) + assert data["relationships"]["provider"]["data"]["id"] == str(provider.id) + assert data["attributes"]["state"] == StateChoices.COMPLETED + + def test_attack_paths_scan_retrieve_not_found_for_foreign_tenant( + self, authenticated_client, create_attack_paths_scan + ): + other_tenant = Tenant.objects.create(name="Foreign AttackPaths Tenant") + foreign_provider = Provider.objects.create( + provider="aws", + uid="333333333333", + alias="foreign", + tenant_id=other_tenant.id, + ) + foreign_scan = create_attack_paths_scan(foreign_provider) + + response = authenticated_client.get( + reverse("attack-paths-scans-detail", kwargs={"pk": foreign_scan.id}) + ) + + assert response.status_code == status.HTTP_404_NOT_FOUND + + def test_attack_paths_queries_returns_catalog( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + ) + + definitions = [ + AttackPathsQueryDefinition( + id="aws-rds", + name="RDS inventory", + description="List account RDS assets", + provider=provider.provider, + cypher="MATCH (n) RETURN n", + parameters=[ + AttackPathsQueryParameterDefinition(name="ip", label="IP address") + ], + ) + ] + + with patch( + "api.v1.views.get_queries_for_provider", return_value=definitions + ) as mock_get_queries: + response = authenticated_client.get( + reverse( + "attack-paths-scans-queries", kwargs={"pk": 
attack_paths_scan.id} + ) + ) + + assert response.status_code == status.HTTP_200_OK + mock_get_queries.assert_called_once_with(provider.provider) + payload = response.json()["data"] + assert len(payload) == 1 + assert payload[0]["id"] == "aws-rds" + assert payload[0]["attributes"]["name"] == "RDS inventory" + assert payload[0]["attributes"]["parameters"][0]["name"] == "ip" + + def test_attack_paths_queries_returns_404_when_catalog_missing( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan(provider, scan=scans_fixture[0]) + + with patch("api.v1.views.get_queries_for_provider", return_value=[]): + response = authenticated_client.get( + reverse( + "attack-paths-scans-queries", kwargs={"pk": attack_paths_scan.id} + ) + ) + + assert response.status_code == status.HTTP_404_NOT_FOUND + assert "No queries found" in str(response.json()) + + def test_run_attack_paths_query_returns_graph( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + graph_database="tenant-db", + ) + query_definition = AttackPathsQueryDefinition( + id="aws-rds", + name="RDS inventory", + description="List account RDS assets", + provider=provider.provider, + cypher="MATCH (n) RETURN n", + parameters=[], + ) + prepared_parameters = {"provider_uid": provider.uid} + graph_payload = { + "nodes": [ + { + "id": "node-1", + "labels": ["AWSAccount"], + "properties": {"name": "root"}, + } + ], + "relationships": [ + { + "id": "rel-1", + "label": "OWNS", + "source": "node-1", + "target": "node-2", + "properties": {}, + } + ], + } + + with ( + patch( + "api.v1.views.get_query_by_id", return_value=query_definition + ) as mock_get_query, + patch( + "api.v1.views.attack_paths_views_helpers.prepare_query_parameters", + return_value=prepared_parameters, + ) as mock_prepare, + patch( + "api.v1.views.attack_paths_views_helpers.execute_attack_paths_query", + return_value=graph_payload, + ) as mock_execute, + ): + response = authenticated_client.post( + reverse( + "attack-paths-scans-queries-run", + kwargs={"pk": attack_paths_scan.id}, + ), + data=self._run_payload("aws-rds"), + content_type=API_JSON_CONTENT_TYPE, + ) + + assert response.status_code == status.HTTP_200_OK + mock_get_query.assert_called_once_with("aws-rds") + mock_prepare.assert_called_once_with( + query_definition, + {}, + attack_paths_scan.provider.uid, + ) + mock_execute.assert_called_once_with( + attack_paths_scan, + query_definition, + prepared_parameters, + ) + result = response.json()["data"] + attributes = result["attributes"] + assert attributes["nodes"] == graph_payload["nodes"] + assert attributes["relationships"] == graph_payload["relationships"] + + def test_run_attack_paths_query_requires_completed_scan( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + state=StateChoices.EXECUTING, + ) + + response = authenticated_client.post( + reverse( + "attack-paths-scans-queries-run", kwargs={"pk": attack_paths_scan.id} + ), + data=self._run_payload(), + content_type=API_JSON_CONTENT_TYPE, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "must be completed" in 
response.json()["errors"][0]["detail"] + + def test_run_attack_paths_query_requires_graph_database( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + graph_database=None, + ) + + response = authenticated_client.post( + reverse( + "attack-paths-scans-queries-run", kwargs={"pk": attack_paths_scan.id} + ), + data=self._run_payload(), + content_type=API_JSON_CONTENT_TYPE, + ) + + assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + assert "does not reference a graph database" in str(response.json()) + + def test_run_attack_paths_query_unknown_query( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + ) + + with patch("api.v1.views.get_query_by_id", return_value=None): + response = authenticated_client.post( + reverse( + "attack-paths-scans-queries-run", + kwargs={"pk": attack_paths_scan.id}, + ), + data=self._run_payload("unknown-query"), + content_type=API_JSON_CONTENT_TYPE, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert "Unknown Attack Paths query" in response.json()["errors"][0]["detail"] + + def test_run_attack_paths_query_returns_404_when_no_nodes_found( + self, + authenticated_client, + providers_fixture, + scans_fixture, + create_attack_paths_scan, + ): + provider = providers_fixture[0] + attack_paths_scan = create_attack_paths_scan( + provider, + scan=scans_fixture[0], + ) + query_definition = AttackPathsQueryDefinition( + id="aws-empty", + name="empty", + description="", + provider=provider.provider, + cypher="MATCH (n) RETURN n", + ) + + with ( + patch("api.v1.views.get_query_by_id", return_value=query_definition), + patch( + "api.v1.views.attack_paths_views_helpers.prepare_query_parameters", + return_value={"provider_uid": provider.uid}, + ), + patch( + "api.v1.views.attack_paths_views_helpers.execute_attack_paths_query", + return_value={"nodes": [], "relationships": []}, + ), + ): + response = authenticated_client.post( + reverse( + "attack-paths-scans-queries-run", + kwargs={"pk": attack_paths_scan.id}, + ), + data=self._run_payload("aws-empty"), + content_type=API_JSON_CONTENT_TYPE, + ) + + assert response.status_code == status.HTTP_404_NOT_FOUND + payload = response.json() + if "data" in payload: + attributes = payload["data"].get("attributes", {}) + assert attributes.get("nodes") == [] + assert attributes.get("relationships") == [] + else: + assert "errors" in payload + + @pytest.mark.django_db class TestResourceViewSet: def test_resources_list_none(self, authenticated_client): @@ -9730,7 +10148,7 @@ class TestLighthouseConfigViewSet: "type": "lighthouse-configurations", "attributes": { "name": "OpenAI", - "api_key": "sk-test1234567890T3BlbkFJtest1234567890", + "api_key": "sk-fake-test-key-for-unit-testing-only", "model": "gpt-4o", "temperature": 0.7, "max_tokens": 4000, @@ -11192,7 +11610,7 @@ class TestLighthouseTenantConfigViewSet: provider_config = LighthouseProviderConfiguration.objects.create( tenant_id=tenants_fixture[0].id, provider_type="openai", - credentials=b'{"api_key": "sk-test1234567890T3BlbkFJtest1234567890"}', + credentials=b'{"api_key": "sk-fake-test-key-for-unit-testing-only"}', is_active=True, ) @@ -11328,7 +11746,7 @@ class TestLighthouseProviderConfigViewSet: "type": 
"lighthouse-providers", "attributes": { "provider_type": "testprovider", - "credentials": {"api_key": "sk-testT3BlbkFJkey"}, + "credentials": {"api_key": "sk-fake-test-key-1234"}, }, } } @@ -11360,7 +11778,7 @@ class TestLighthouseProviderConfigViewSet: "credentials", [ {}, # empty credentials - {"token": "sk-testT3BlbkFJkey"}, # wrong key name + {"token": "sk-fake-test-key-1234"}, # wrong key name {"api_key": "ks-invalid-format"}, # wrong format ], ) @@ -11384,7 +11802,7 @@ class TestLighthouseProviderConfigViewSet: def test_openai_valid_credentials_success(self, authenticated_client): """OpenAI provider with valid sk-xxx format should succeed""" - valid_key = "sk-abc123T3BlbkFJxyz456" + valid_key = "sk-fake-abc-test-key-xyz" payload = { "data": { "type": "lighthouse-providers", @@ -11409,7 +11827,7 @@ class TestLighthouseProviderConfigViewSet: def test_openai_provider_duplicate_per_tenant(self, authenticated_client): """If an OpenAI provider exists for tenant, creating again should error""" - valid_key = "sk-dup123T3BlbkFJdup456" + valid_key = "sk-fake-dup-test-key-456" payload = { "data": { "type": "lighthouse-providers", @@ -11438,7 +11856,7 @@ class TestLighthouseProviderConfigViewSet: def test_openai_patch_base_url_and_is_active(self, authenticated_client): """After creating, should be able to patch base_url and is_active""" - valid_key = "sk-patch123T3BlbkFJpatch456" + valid_key = "sk-fake-patch-test-key-456" create_payload = { "data": { "type": "lighthouse-providers", @@ -11478,7 +11896,7 @@ class TestLighthouseProviderConfigViewSet: def test_openai_patch_invalid_credentials(self, authenticated_client): """PATCH with invalid credentials.api_key should error (400)""" - valid_key = "sk-ok123T3BlbkFJok456" + valid_key = "sk-fake-ok-test-key-456" create_payload = { "data": { "type": "lighthouse-providers", @@ -11514,7 +11932,7 @@ class TestLighthouseProviderConfigViewSet: assert patch_resp.status_code == status.HTTP_400_BAD_REQUEST def test_openai_get_masking_and_fields_filter(self, authenticated_client): - valid_key = "sk-get123T3BlbkFJget456" + valid_key = "sk-fake-get-test-key-456" create_payload = { "data": { "type": "lighthouse-providers", @@ -11560,7 +11978,7 @@ class TestLighthouseProviderConfigViewSet: provider = LighthouseProviderConfiguration.objects.create( tenant_id=tenant.id, provider_type="openai", - credentials=b'{"api_key":"sk-test123T3BlbkFJ"}', + credentials=b'{"api_key":"sk-fake-test-key-123"}', is_active=True, ) diff --git a/api/src/backend/api/v1/serializers.py b/api/src/backend/api/v1/serializers.py index ae9d86e89e..a1f65404d1 100644 --- a/api/src/backend/api/v1/serializers.py +++ b/api/src/backend/api/v1/serializers.py @@ -21,6 +21,7 @@ from rest_framework_simplejwt.tokens import RefreshToken from api.db_router import MainRouter from api.exceptions import ConflictException from api.models import ( + AttackPathsScan, Finding, Integration, IntegrationProviderRelationship, @@ -1132,6 +1133,109 @@ class ScanComplianceReportSerializer(BaseSerializerV1): fields = ["id", "name"] +class AttackPathsScanSerializer(RLSSerializer): + state = StateEnumSerializerField(read_only=True) + provider_alias = serializers.SerializerMethodField(read_only=True) + provider_type = serializers.SerializerMethodField(read_only=True) + provider_uid = serializers.SerializerMethodField(read_only=True) + + class Meta: + model = AttackPathsScan + fields = [ + "id", + "state", + "progress", + "provider", + "provider_alias", + "provider_type", + "provider_uid", + "scan", + "task", + "inserted_at", 
+ "started_at", + "completed_at", + "duration", + ] + + included_serializers = { + "provider": "api.v1.serializers.ProviderIncludeSerializer", + "scan": "api.v1.serializers.ScanIncludeSerializer", + "task": "api.v1.serializers.TaskSerializer", + } + + def get_provider_alias(self, obj): + provider = getattr(obj, "provider", None) + return provider.alias if provider else None + + def get_provider_type(self, obj): + provider = getattr(obj, "provider", None) + return provider.provider if provider else None + + def get_provider_uid(self, obj): + provider = getattr(obj, "provider", None) + return provider.uid if provider else None + + +class AttackPathsQueryParameterSerializer(BaseSerializerV1): + name = serializers.CharField() + label = serializers.CharField() + data_type = serializers.CharField(default="string") + description = serializers.CharField(allow_null=True, required=False) + placeholder = serializers.CharField(allow_null=True, required=False) + + class JSONAPIMeta: + resource_name = "attack-paths-query-parameters" + + +class AttackPathsQuerySerializer(BaseSerializerV1): + id = serializers.CharField() + name = serializers.CharField() + description = serializers.CharField() + provider = serializers.CharField() + parameters = AttackPathsQueryParameterSerializer(many=True) + + class JSONAPIMeta: + resource_name = "attack-paths-queries" + + +class AttackPathsQueryRunRequestSerializer(BaseSerializerV1): + id = serializers.CharField() + parameters = serializers.DictField( + child=serializers.JSONField(), allow_empty=True, required=False + ) + + class JSONAPIMeta: + resource_name = "attack-paths-query-run-requests" + + +class AttackPathsNodeSerializer(BaseSerializerV1): + id = serializers.CharField() + labels = serializers.ListField(child=serializers.CharField()) + properties = serializers.DictField(child=serializers.JSONField()) + + class JSONAPIMeta: + resource_name = "attack-paths-query-result-nodes" + + +class AttackPathsRelationshipSerializer(BaseSerializerV1): + id = serializers.CharField() + label = serializers.CharField() + source = serializers.CharField() + target = serializers.CharField() + properties = serializers.DictField(child=serializers.JSONField()) + + class JSONAPIMeta: + resource_name = "attack-paths-query-result-relationships" + + +class AttackPathsQueryResultSerializer(BaseSerializerV1): + nodes = AttackPathsNodeSerializer(many=True) + relationships = AttackPathsRelationshipSerializer(many=True) + + class JSONAPIMeta: + resource_name = "attack-paths-query-results" + + class ResourceTagSerializer(RLSSerializer): """ Serializer for the ResourceTag model diff --git a/api/src/backend/api/v1/urls.py b/api/src/backend/api/v1/urls.py index d879d1476b..840f027b42 100644 --- a/api/src/backend/api/v1/urls.py +++ b/api/src/backend/api/v1/urls.py @@ -4,6 +4,7 @@ from drf_spectacular.views import SpectacularRedocView from rest_framework_nested import routers from api.v1.views import ( + AttackPathsScanViewSet, ComplianceOverviewViewSet, CustomSAMLLoginView, CustomTokenObtainView, @@ -53,6 +54,9 @@ router.register(r"tenants", TenantViewSet, basename="tenant") router.register(r"providers", ProviderViewSet, basename="provider") router.register(r"provider-groups", ProviderGroupViewSet, basename="providergroup") router.register(r"scans", ScanViewSet, basename="scan") +router.register( + r"attack-paths-scans", AttackPathsScanViewSet, basename="attack-paths-scans" +) router.register(r"tasks", TaskViewSet, basename="task") router.register(r"resources", ResourceViewSet, basename="resource") 
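With the registration above in place, DRF derives the route names the tests rely on. A small illustrative sketch (assuming this URLconf is mounted under /api/v1, as the spec paths suggest; the UUID is a placeholder):

from django.urls import reverse

scan_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID

print(reverse("attack-paths-scans-list"))  # /api/v1/attack-paths-scans
print(reverse("attack-paths-scans-detail", kwargs={"pk": scan_id}))  # .../{id}
# The viewset's extra actions (defined below in views.py) contribute:
print(reverse("attack-paths-scans-queries", kwargs={"pk": scan_id}))  # .../{id}/queries
print(reverse("attack-paths-scans-queries-run", kwargs={"pk": scan_id}))  # .../{id}/queries/run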
router.register(r"findings", FindingViewSet, basename="finding") diff --git a/api/src/backend/api/v1/views.py b/api/src/backend/api/v1/views.py index 7c6fd1b9b3..c7b38e0ef8 100644 --- a/api/src/backend/api/v1/views.py +++ b/api/src/backend/api/v1/views.py @@ -3,6 +3,7 @@ import glob import json import logging import os + from collections import defaultdict from copy import deepcopy from datetime import datetime, timedelta, timezone @@ -10,6 +11,7 @@ from decimal import ROUND_HALF_UP, Decimal, InvalidOperation from urllib.parse import urljoin import sentry_sdk + from allauth.socialaccount.models import SocialAccount, SocialApp from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter @@ -41,8 +43,9 @@ from django.db.models import ( Sum, Value, When, + Window, ) -from django.db.models.functions import Coalesce +from django.db.models.functions import Coalesce, RowNumber from django.http import HttpResponse, QueryDict from django.shortcuts import redirect from django.urls import reverse @@ -72,23 +75,12 @@ from rest_framework.generics import GenericAPIView, get_object_or_404 from rest_framework.permissions import SAFE_METHODS from rest_framework_json_api.views import RelationshipView, Response from rest_framework_simplejwt.exceptions import InvalidToken, TokenError -from tasks.beat import schedule_provider_scan -from tasks.jobs.export import get_s3_client -from tasks.tasks import ( - backfill_compliance_summaries_task, - backfill_scan_resource_summaries_task, - check_integration_connection_task, - check_lighthouse_connection_task, - check_lighthouse_provider_connection_task, - check_provider_connection_task, - delete_provider_task, - delete_tenant_task, - jira_integration_task, - mute_historical_findings_task, - perform_scan_task, - refresh_lighthouse_provider_models_task, -) +from api.attack_paths import ( + get_queries_for_provider, + get_query_by_id, + views_helpers as attack_paths_views_helpers, +) from api.base_views import BaseRLSViewSet, BaseTenantViewset, BaseUserViewset from api.compliance import ( PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, @@ -110,6 +102,7 @@ from api.filters import ( InvitationFilter, LatestFindingFilter, LatestResourceFilter, + AttackPathsScanFilter, LighthouseProviderConfigFilter, LighthouseProviderModelsFilter, MembershipFilter, @@ -138,6 +131,7 @@ from api.models import ( Finding, Integration, Invitation, + AttackPathsScan, LighthouseConfiguration, LighthouseProviderConfiguration, LighthouseProviderModels, @@ -183,6 +177,10 @@ from api.utils import ( from api.uuid_utils import datetime_to_uuid7, uuid7_start from api.v1.mixins import DisablePaginationMixin, PaginateByPkMixin, TaskManagementMixin from api.v1.serializers import ( + AttackPathsQueryRunRequestSerializer, + AttackPathsQuerySerializer, + AttackPathsQueryResultSerializer, + AttackPathsScanSerializer, AttackSurfaceOverviewSerializer, CategoryOverviewSerializer, ComplianceOverviewAttributesSerializer, @@ -265,6 +263,23 @@ from api.v1.serializers import ( UserSerializer, UserUpdateSerializer, ) +from tasks.beat import schedule_provider_scan +from tasks.jobs.attack_paths import db_utils as attack_paths_db_utils +from tasks.jobs.export import get_s3_client +from tasks.tasks import ( + backfill_compliance_summaries_task, + backfill_scan_resource_summaries_task, + check_integration_connection_task, + check_lighthouse_connection_task, + check_lighthouse_provider_connection_task, + check_provider_connection_task, + 
delete_provider_task, + delete_tenant_task, + jira_integration_task, + mute_historical_findings_task, + perform_scan_task, + refresh_lighthouse_provider_models_task, +) logger = logging.getLogger(BackendLogger.API) @@ -408,6 +423,10 @@ class SchemaView(SpectacularAPIView): "name": "Scan", "description": "Endpoints for triggering manual scans and viewing scan results.", }, + { + "name": "Attack Paths", + "description": "Endpoints for Attack Paths scan status and executing Attack Paths queries.", + }, { "name": "Schedule", "description": "Endpoints for managing scan schedules, allowing configuration of automated " @@ -2158,6 +2177,12 @@ class ScanViewSet(BaseRLSViewSet): }, ) + attack_paths_db_utils.create_attack_paths_scan( + tenant_id=self.request.tenant_id, + scan_id=str(scan.id), + provider_id=str(scan.provider_id), + ) + prowler_task = Task.objects.get(id=task.id) scan.task_id = task.id scan.save(update_fields=["task_id"]) @@ -2238,6 +2263,187 @@ class TaskViewSet(BaseRLSViewSet): ) +@extend_schema_view( + list=extend_schema( + tags=["Attack Paths"], + summary="List Attack Paths scans", + description="Retrieve Attack Paths scans for the tenant with support for filtering, ordering, and pagination.", + ), + retrieve=extend_schema( + tags=["Attack Paths"], + summary="Retrieve Attack Paths scan details", + description="Fetch full details for a specific Attack Paths scan.", + ), + attack_paths_queries=extend_schema( + tags=["Attack Paths"], + summary="List attack paths queries", + description="Retrieve the catalog of Attack Paths queries available for this Attack Paths scan.", + responses={ + 200: OpenApiResponse(AttackPathsQuerySerializer(many=True)), + 404: OpenApiResponse( + description="No queries found for the selected provider" + ), + }, + ), + run_attack_paths_query=extend_schema( + tags=["Attack Paths"], + summary="Execute an Attack Paths query", + description="Execute the selected Attack Paths query against the Attack Paths graph and return the resulting subgraph.", + request=AttackPathsQueryRunRequestSerializer, + responses={ + 200: OpenApiResponse(AttackPathsQueryResultSerializer), + 400: OpenApiResponse( + description="Bad request (e.g., Unknown Attack Paths query for the selected provider)" + ), + 404: OpenApiResponse( + description="No attack paths found for the given query and parameters" + ), + 500: OpenApiResponse( + description="Attack Paths query execution failed due to a database error" + ), + }, + ), +) +class AttackPathsScanViewSet(BaseRLSViewSet): + queryset = AttackPathsScan.objects.all() + serializer_class = AttackPathsScanSerializer + http_method_names = ["get", "post"] + filterset_class = AttackPathsScanFilter + ordering = ["-inserted_at"] + ordering_fields = [ + "inserted_at", + "started_at", + ] + # RBAC required permissions + required_permissions = [Permissions.MANAGE_SCANS] + + def set_required_permissions(self): + if self.request.method in SAFE_METHODS: + self.required_permissions = [] + + else: + self.required_permissions = [Permissions.MANAGE_SCANS] + + def get_serializer_class(self): + if self.action == "run_attack_paths_query": + return AttackPathsQueryRunRequestSerializer + + return super().get_serializer_class() + + def get_queryset(self): + user_roles = get_role(self.request.user) + base_queryset = AttackPathsScan.objects.filter(tenant_id=self.request.tenant_id) + + if user_roles.unlimited_visibility: + queryset = base_queryset + + else: + queryset = base_queryset.filter(provider__in=get_providers(user_roles)) + + return 
queryset.select_related("provider", "scan", "task") + + def list(self, request, *args, **kwargs): + queryset = self.filter_queryset(self.get_queryset()) + + latest_per_provider = queryset.annotate( + latest_scan_rank=Window( + expression=RowNumber(), + partition_by=[F("provider_id")], + order_by=[F("inserted_at").desc()], + ) + ).filter(latest_scan_rank=1) + + page = self.paginate_queryset(latest_per_provider) + if page is not None: + serializer = self.get_serializer(page, many=True) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(latest_per_provider, many=True) + return Response(serializer.data) + + @extend_schema(exclude=True) + def create(self, request, *args, **kwargs): + raise MethodNotAllowed(method="POST") + + @extend_schema(exclude=True) + def destroy(self, request, *args, **kwargs): + raise MethodNotAllowed(method="DELETE") + + @action( + detail=True, + methods=["get"], + url_path="queries", + url_name="queries", + ) + def attack_paths_queries(self, request, pk=None): + attack_paths_scan = self.get_object() + queries = get_queries_for_provider(attack_paths_scan.provider.provider) + + if not queries: + return Response( + {"detail": "No queries found for the selected provider"}, + status=status.HTTP_404_NOT_FOUND, + ) + + serializer = AttackPathsQuerySerializer(queries, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + @action( + detail=True, + methods=["post"], + url_path="queries/run", + url_name="queries-run", + ) + def run_attack_paths_query(self, request, pk=None): + attack_paths_scan = self.get_object() + + if attack_paths_scan.state != StateChoices.COMPLETED: + raise ValidationError( + { + "detail": "The Attack Paths scan must be completed before running Attack Paths queries" + } + ) + + if not attack_paths_scan.graph_database: + logger.error( + f"The Attack Paths Scan {attack_paths_scan.id} does not reference a graph database" + ) + return Response( + {"detail": "The Attack Paths scan does not reference a graph database"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + payload = attack_paths_views_helpers.normalize_run_payload(request.data) + serializer = AttackPathsQueryRunRequestSerializer(data=payload) + serializer.is_valid(raise_exception=True) + + query_definition = get_query_by_id(serializer.validated_data["id"]) + if ( + query_definition is None + or query_definition.provider != attack_paths_scan.provider.provider + ): + raise ValidationError( + {"id": "Unknown Attack Paths query for the selected provider"} + ) + + parameters = attack_paths_views_helpers.prepare_query_parameters( + query_definition, + serializer.validated_data.get("parameters", {}), + attack_paths_scan.provider.uid, + ) + + graph = attack_paths_views_helpers.execute_attack_paths_query( + attack_paths_scan, query_definition, parameters + ) + + status_code = status.HTTP_200_OK + if not graph.get("nodes"): + status_code = status.HTTP_404_NOT_FOUND + + response_serializer = AttackPathsQueryResultSerializer(graph) + return Response(response_serializer.data, status=status_code) + + @extend_schema_view( list=extend_schema( tags=["Resource"], @@ -5912,7 +6118,7 @@ class TenantApiKeyViewSet(BaseRLSViewSet): @extend_schema(exclude=True) def destroy(self, request, *args, **kwargs): - raise MethodNotAllowed(method="DESTROY") + raise MethodNotAllowed(method="DELETE") @action(detail=True, methods=["delete"]) def revoke(self, request, *args, **kwargs): diff --git a/api/src/backend/config/celery.py b/api/src/backend/config/celery.py index 
b3a0ab4b68..aaa1b1c386 100644 --- a/api/src/backend/config/celery.py +++ b/api/src/backend/config/celery.py @@ -1,6 +1,7 @@ import warnings from celery import Celery, Task + from config.env import env # Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684 diff --git a/api/src/backend/config/django/devel.py b/api/src/backend/config/django/devel.py index 00d7f7dbcc..9c83557b77 100644 --- a/api/src/backend/config/django/devel.py +++ b/api/src/backend/config/django/devel.py @@ -44,6 +44,12 @@ DATABASES = { "HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host), "PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port), }, + "neo4j": { + "HOST": env.str("NEO4J_HOST", "neo4j"), + "PORT": env.str("NEO4J_PORT", "7687"), + "USER": env.str("NEO4J_USER", "neo4j"), + "PASSWORD": env.str("NEO4J_PASSWORD", "neo4j_password"), + }, } DATABASES["default"] = DATABASES["prowler_user"] diff --git a/api/src/backend/config/django/production.py b/api/src/backend/config/django/production.py index f350186ed0..b2769237fc 100644 --- a/api/src/backend/config/django/production.py +++ b/api/src/backend/config/django/production.py @@ -45,6 +45,12 @@ DATABASES = { "HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host), "PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port), }, + "neo4j": { + "HOST": env.str("NEO4J_HOST"), + "PORT": env.str("NEO4J_PORT"), + "USER": env.str("NEO4J_USER"), + "PASSWORD": env.str("NEO4J_PASSWORD"), + }, } DATABASES["default"] = DATABASES["prowler_user"] diff --git a/api/src/backend/conftest.py b/api/src/backend/conftest.py index be47c6ab42..a3b683ffea 100644 --- a/api/src/backend/conftest.py +++ b/api/src/backend/conftest.py @@ -1,8 +1,11 @@ import logging +from types import SimpleNamespace + from datetime import datetime, timedelta, timezone from unittest.mock import MagicMock, patch import pytest + from allauth.socialaccount.models import SocialLogin from django.conf import settings from django.db import connection as django_connection @@ -11,14 +14,14 @@ from django.urls import reverse from django_celery_results.models import TaskResult from rest_framework import status from rest_framework.test import APIClient -from tasks.jobs.backfill import ( - backfill_resource_scan_summaries, - backfill_scan_category_summaries, - backfill_scan_resource_group_summaries, -) +from api.attack_paths import ( + AttackPathsQueryDefinition, + AttackPathsQueryParameterDefinition, +) from api.db_utils import rls_transaction from api.models import ( + AttackPathsScan, AttackSurfaceOverview, ComplianceOverview, ComplianceRequirementOverview, @@ -56,6 +59,11 @@ from api.rls import Tenant from api.v1.serializers import TokenSerializer from prowler.lib.check.models import Severity from prowler.lib.outputs.finding import Status +from tasks.jobs.backfill import ( + backfill_resource_scan_summaries, + backfill_scan_category_summaries, + backfill_scan_resource_group_summaries, +) TODAY = str(datetime.today().date()) API_JSON_CONTENT_TYPE = "application/vnd.api+json" @@ -168,22 +176,20 @@ def create_test_user_rbac_no_roles(django_db_setup, django_db_blocker, tenants_f @pytest.fixture(scope="function") -def create_test_user_rbac_limited(django_db_setup, django_db_blocker): +def create_test_user_rbac_limited(django_db_setup, django_db_blocker, tenants_fixture): with django_db_blocker.unblock(): user = User.objects.create_user( name="testing_limited", email="rbac_limited@rbac.com", password=TEST_PASSWORD, ) - tenant = Tenant.objects.create( - 
name="Tenant Test", - ) + tenant = tenants_fixture[0] Membership.objects.create( user=user, tenant=tenant, role=Membership.RoleChoices.OWNER, ) - Role.objects.create( + role = Role.objects.create( name="limited", tenant_id=tenant.id, manage_users=False, @@ -196,7 +202,7 @@ def create_test_user_rbac_limited(django_db_setup, django_db_blocker): ) UserRoleRelationship.objects.create( user=user, - role=Role.objects.get(name="limited"), + role=role, tenant_id=tenant.id, ) return user @@ -1239,7 +1245,7 @@ def lighthouse_config_fixture(authenticated_client, tenants_fixture): return LighthouseConfiguration.objects.create( tenant_id=tenants_fixture[0].id, name="OpenAI", - api_key_decoded="sk-test1234567890T3BlbkFJtest1234567890", + api_key_decoded="sk-fake-test-key-for-unit-testing-only", model="gpt-4o", temperature=0, max_tokens=4000, @@ -1597,6 +1603,104 @@ def mute_rules_fixture(tenants_fixture, create_test_user, findings_fixture): return mute_rule1, mute_rule2 +@pytest.fixture +def create_attack_paths_scan(): + """Factory fixture to create Attack Paths scans for tests.""" + + def _create( + provider, + *, + scan=None, + state=StateChoices.COMPLETED, + progress=0, + graph_database="tenant-db", + **extra_fields, + ): + scan_instance = scan or Scan.objects.create( + name=extra_fields.pop("scan_name", "Attack Paths Supporting Scan"), + provider=provider, + trigger=Scan.TriggerChoices.MANUAL, + state=extra_fields.pop("scan_state", StateChoices.COMPLETED), + tenant_id=provider.tenant_id, + ) + + payload = { + "tenant_id": provider.tenant_id, + "provider": provider, + "scan": scan_instance, + "state": state, + "progress": progress, + "graph_database": graph_database, + } + payload.update(extra_fields) + + return AttackPathsScan.objects.create(**payload) + + return _create + + +@pytest.fixture +def attack_paths_query_definition_factory(): + """Factory fixture for building Attack Paths query definitions.""" + + def _create(**overrides): + cast_type = overrides.pop("cast_type", str) + parameters = overrides.pop( + "parameters", + [ + AttackPathsQueryParameterDefinition( + name="limit", + label="Limit", + cast=cast_type, + ) + ], + ) + definition_payload = { + "id": "aws-test", + "name": "Attack Paths Test Query", + "description": "Synthetic Attack Paths definition for tests.", + "provider": "aws", + "cypher": "RETURN 1", + "parameters": parameters, + } + definition_payload.update(overrides) + return AttackPathsQueryDefinition(**definition_payload) + + return _create + + +@pytest.fixture +def attack_paths_graph_stub_classes(): + """Provide lightweight graph element stubs for Attack Paths serialization tests.""" + + class AttackPathsNativeValue: + def __init__(self, value): + self._value = value + + def to_native(self): + return self._value + + class AttackPathsNode: + def __init__(self, element_id, labels, properties): + self.element_id = element_id + self.labels = labels + self._properties = properties + + class AttackPathsRelationship: + def __init__(self, element_id, rel_type, start_node, end_node, properties): + self.element_id = element_id + self.type = rel_type + self.start_node = start_node + self.end_node = end_node + self._properties = properties + + return SimpleNamespace( + NativeValue=AttackPathsNativeValue, + Node=AttackPathsNode, + Relationship=AttackPathsRelationship, + ) + + @pytest.fixture def create_attack_surface_overview(): def _create(tenant, scan, attack_surface_type, total=10, failed=5, muted_failed=2): diff --git a/api/src/backend/tasks/beat.py b/api/src/backend/tasks/beat.py 
index 262d47496a..e9eb9c9309 100644 --- a/api/src/backend/tasks/beat.py +++ b/api/src/backend/tasks/beat.py @@ -7,6 +7,7 @@ from tasks.tasks import perform_scheduled_scan_task from api.db_utils import rls_transaction from api.exceptions import ConflictException from api.models import Provider, Scan, StateChoices +from tasks.jobs.attack_paths import db_utils as attack_paths_db_utils def schedule_provider_scan(provider_instance: Provider): @@ -39,6 +40,12 @@ def schedule_provider_scan(provider_instance: Provider): scheduled_at=datetime.now(timezone.utc), ) + attack_paths_db_utils.create_attack_paths_scan( + tenant_id=tenant_id, + scan_id=str(scheduled_scan.id), + provider_id=provider_id, + ) + # Schedule the task periodic_task_instance = PeriodicTask.objects.create( interval=schedule, diff --git a/api/src/backend/tasks/jobs/attack_paths/__init__.py b/api/src/backend/tasks/jobs/attack_paths/__init__.py new file mode 100644 index 0000000000..8fb57bc907 --- /dev/null +++ b/api/src/backend/tasks/jobs/attack_paths/__init__.py @@ -0,0 +1,7 @@ +from tasks.jobs.attack_paths.db_utils import can_provider_run_attack_paths_scan +from tasks.jobs.attack_paths.scan import run as attack_paths_scan + +__all__ = [ + "attack_paths_scan", + "can_provider_run_attack_paths_scan", +] diff --git a/api/src/backend/tasks/jobs/attack_paths/aws.py b/api/src/backend/tasks/jobs/attack_paths/aws.py new file mode 100644 index 0000000000..e244b6cca7 --- /dev/null +++ b/api/src/backend/tasks/jobs/attack_paths/aws.py @@ -0,0 +1,237 @@ +# Portions of this file are based on code from the Cartography project +# (https://github.com/cartography-cncf/cartography), which is licensed under the Apache 2.0 License. + +from typing import Any + +import aioboto3 +import boto3 +import neo4j + +from cartography.config import Config as CartographyConfig +from cartography.intel import aws as cartography_aws +from celery.utils.log import get_task_logger + +from api.models import ( + AttackPathsScan as ProwlerAPIAttackPathsScan, + Provider as ProwlerAPIProvider, +) +from prowler.providers.common.provider import Provider as ProwlerSDKProvider +from tasks.jobs.attack_paths import db_utils, utils + +logger = get_task_logger(__name__) + + +def start_aws_ingestion( + neo4j_session: neo4j.Session, + cartography_config: CartographyConfig, + prowler_api_provider: ProwlerAPIProvider, + prowler_sdk_provider: ProwlerSDKProvider, + attack_paths_scan: ProwlerAPIAttackPathsScan, +) -> dict[str, dict[str, str]]: + """ + Code based on Cartography version 0.122.0, specifically on `cartography.intel.aws.__init__.py`. + + For the scan progress updates: + - The caller of this function (`tasks.jobs.attack_paths.scan.run`) has set it to 2. + - When the control returns to the caller, it will be set to 95. 
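+    - In between, `start_aws_ingestion` bumps progress in small fixed steps for
+      the bookkeeping stages (3, 4, then 88-94), while `sync_aws_account`
+      interpolates it linearly from 4 up to 87 across the per-account sync functions.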
+ """ + + # Initialize variables common to all jobs + common_job_parameters = { + "UPDATE_TAG": cartography_config.update_tag, + "permission_relationships_file": cartography_config.permission_relationships_file, + "aws_guardduty_severity_threshold": cartography_config.aws_guardduty_severity_threshold, + "aws_cloudtrail_management_events_lookback_hours": cartography_config.aws_cloudtrail_management_events_lookback_hours, + "experimental_aws_inspector_batch": cartography_config.experimental_aws_inspector_batch, + } + + boto3_session = get_boto3_session(prowler_api_provider, prowler_sdk_provider) + regions: list[str] = list(prowler_sdk_provider._enabled_regions) + requested_syncs = list(cartography_aws.RESOURCE_FUNCTIONS.keys()) + + sync_args = cartography_aws._build_aws_sync_kwargs( + neo4j_session, + boto3_session, + regions, + prowler_api_provider.uid, + cartography_config.update_tag, + common_job_parameters, + ) + + # Starting with sync functions + cartography_aws.organizations.sync( + neo4j_session, + {prowler_api_provider.alias: prowler_api_provider.uid}, + cartography_config.update_tag, + common_job_parameters, + ) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 3) + + # Adding an extra field + common_job_parameters["AWS_ID"] = prowler_api_provider.uid + + cartography_aws._autodiscover_accounts( + neo4j_session, + boto3_session, + prowler_api_provider.uid, + cartography_config.update_tag, + common_job_parameters, + ) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 4) + + failed_syncs = sync_aws_account( + prowler_api_provider, requested_syncs, sync_args, attack_paths_scan + ) + + if "permission_relationships" in requested_syncs: + cartography_aws.RESOURCE_FUNCTIONS["permission_relationships"](**sync_args) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 88) + + if "resourcegroupstaggingapi" in requested_syncs: + cartography_aws.RESOURCE_FUNCTIONS["resourcegroupstaggingapi"](**sync_args) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 89) + + cartography_aws.run_scoped_analysis_job( + "aws_ec2_iaminstanceprofile.json", + neo4j_session, + common_job_parameters, + ) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 90) + + cartography_aws.run_analysis_job( + "aws_lambda_ecr.json", + neo4j_session, + common_job_parameters, + ) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 91) + + cartography_aws.merge_module_sync_metadata( + neo4j_session, + group_type="AWSAccount", + group_id=prowler_api_provider.uid, + synced_type="AWSAccount", + update_tag=cartography_config.update_tag, + stat_handler=cartography_aws.stat_handler, + ) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 92) + + # Removing the added extra field + del common_job_parameters["AWS_ID"] + + cartography_aws.run_cleanup_job( + "aws_post_ingestion_principals_cleanup.json", + neo4j_session, + common_job_parameters, + ) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 93) + + cartography_aws._perform_aws_analysis( + requested_syncs, neo4j_session, common_job_parameters + ) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 94) + + return failed_syncs + + +def get_boto3_session( + prowler_api_provider: ProwlerAPIProvider, prowler_sdk_provider: ProwlerSDKProvider +) -> boto3.Session: + boto3_session = prowler_sdk_provider.session.current_session + + aws_accounts_from_session = cartography_aws.organizations.get_aws_account_default( + boto3_session + ) + if not aws_accounts_from_session: + raise 
Exception( + "No valid AWS credentials could be found. No AWS accounts can be synced." + ) + + aws_account_id_from_session = list(aws_accounts_from_session.values())[0] + if prowler_api_provider.uid != aws_account_id_from_session: + raise Exception( + f"Provider {prowler_api_provider.uid} doesn't match AWS account {aws_account_id_from_session}." + ) + + if boto3_session.region_name is None: + global_region = prowler_sdk_provider.get_global_region() + boto3_session._session.set_config_variable("region", global_region) + + return boto3_session + + +def get_aioboto3_session(boto3_session: boto3.Session) -> aioboto3.Session: + return aioboto3.Session(botocore_session=boto3_session._session) + + +def sync_aws_account( + prowler_api_provider: ProwlerAPIProvider, + requested_syncs: list[str], + sync_args: dict[str, Any], + attack_paths_scan: ProwlerAPIAttackPathsScan, +) -> dict[str, str]: + current_progress = 4 # `cartography_aws._autodiscover_accounts` + max_progress = ( + 87 # `cartography_aws.RESOURCE_FUNCTIONS["permission_relationships"]` - 1 + ) + n_steps = ( + len(requested_syncs) - 2 + ) # Excluding `permission_relationships` and `resourcegroupstaggingapi` + progress_step = (max_progress - current_progress) / n_steps + + failed_syncs = {} + + for func_name in requested_syncs: + if func_name in cartography_aws.RESOURCE_FUNCTIONS: + logger.info( + f"Syncing function {func_name} for AWS account {prowler_api_provider.uid}" + ) + + # Updating progress, not really the right place but good enough + current_progress += progress_step + db_utils.update_attack_paths_scan_progress( + attack_paths_scan, int(current_progress) + ) + + try: + # `ecr:image_layers` uses `aioboto3_session` instead of `boto3_session` + if func_name == "ecr:image_layers": + cartography_aws.RESOURCE_FUNCTIONS[func_name]( + neo4j_session=sync_args.get("neo4j_session"), + aioboto3_session=get_aioboto3_session( + sync_args.get("boto3_session") + ), + regions=sync_args.get("regions"), + current_aws_account_id=sync_args.get("current_aws_account_id"), + update_tag=sync_args.get("update_tag"), + common_job_parameters=sync_args.get("common_job_parameters"), + ) + + # Skip permission relationships and tags for now because they rely on data already being in the graph + elif func_name in [ + "permission_relationships", + "resourcegroupstaggingapi", + ]: + continue + + else: + cartography_aws.RESOURCE_FUNCTIONS[func_name](**sync_args) + + except Exception as e: + exception_message = utils.stringify_exception( + e, f"Exception for AWS sync function: {func_name}" + ) + failed_syncs[func_name] = exception_message + + logger.warning( + f"Caught exception syncing function {func_name} from AWS account {prowler_api_provider.uid}. We " + "are continuing on to the next AWS sync function.", + ) + + continue + + else: + raise ValueError( + f'AWS sync function "{func_name}" was specified but does not exist. Did you misspell it?' 
+ ) + + return failed_syncs diff --git a/api/src/backend/tasks/jobs/attack_paths/db_utils.py b/api/src/backend/tasks/jobs/attack_paths/db_utils.py new file mode 100644 index 0000000000..63451ef74d --- /dev/null +++ b/api/src/backend/tasks/jobs/attack_paths/db_utils.py @@ -0,0 +1,161 @@ +from datetime import datetime, timezone +from typing import Any + +from cartography.config import Config as CartographyConfig + +from api.db_utils import rls_transaction +from api.models import ( + AttackPathsScan as ProwlerAPIAttackPathsScan, + Provider as ProwlerAPIProvider, + StateChoices, +) +from tasks.jobs.attack_paths.providers import is_provider_available + + +def can_provider_run_attack_paths_scan(tenant_id: str, provider_id: int) -> bool: + with rls_transaction(tenant_id): + prowler_api_provider = ProwlerAPIProvider.objects.get(id=provider_id) + + return is_provider_available(prowler_api_provider.provider) + + +def create_attack_paths_scan( + tenant_id: str, + scan_id: str, + provider_id: int, +) -> ProwlerAPIAttackPathsScan | None: + if not can_provider_run_attack_paths_scan(tenant_id, provider_id): + return None + + with rls_transaction(tenant_id): + attack_paths_scan = ProwlerAPIAttackPathsScan.objects.create( + tenant_id=tenant_id, + provider_id=provider_id, + scan_id=scan_id, + state=StateChoices.SCHEDULED, + started_at=datetime.now(tz=timezone.utc), + ) + attack_paths_scan.save() + + return attack_paths_scan + + +def retrieve_attack_paths_scan( + tenant_id: str, + scan_id: str, +) -> ProwlerAPIAttackPathsScan | None: + try: + with rls_transaction(tenant_id): + attack_paths_scan = ProwlerAPIAttackPathsScan.objects.get( + scan_id=scan_id, + ) + + return attack_paths_scan + + except ProwlerAPIAttackPathsScan.DoesNotExist: + return None + + +def starting_attack_paths_scan( + attack_paths_scan: ProwlerAPIAttackPathsScan, + task_id: str, + cartography_config: CartographyConfig, +) -> None: + with rls_transaction(attack_paths_scan.tenant_id): + attack_paths_scan.task_id = task_id + attack_paths_scan.state = StateChoices.EXECUTING + attack_paths_scan.started_at = datetime.now(tz=timezone.utc) + attack_paths_scan.update_tag = cartography_config.update_tag + attack_paths_scan.graph_database = cartography_config.neo4j_database + + attack_paths_scan.save( + update_fields=[ + "task_id", + "state", + "started_at", + "update_tag", + "graph_database", + ] + ) + + +def finish_attack_paths_scan( + attack_paths_scan: ProwlerAPIAttackPathsScan, + state: StateChoices, + ingestion_exceptions: dict[str, Any], +) -> None: + with rls_transaction(attack_paths_scan.tenant_id): + now = datetime.now(tz=timezone.utc) + duration = int((now - attack_paths_scan.started_at).total_seconds()) + + attack_paths_scan.state = state + attack_paths_scan.progress = 100 + attack_paths_scan.completed_at = now + attack_paths_scan.duration = duration + attack_paths_scan.ingestion_exceptions = ingestion_exceptions + + attack_paths_scan.save( + update_fields=[ + "state", + "progress", + "completed_at", + "duration", + "ingestion_exceptions", + ] + ) + + +def update_attack_paths_scan_progress( + attack_paths_scan: ProwlerAPIAttackPathsScan, + progress: int, +) -> None: + with rls_transaction(attack_paths_scan.tenant_id): + attack_paths_scan.progress = progress + attack_paths_scan.save(update_fields=["progress"]) + + +def get_old_attack_paths_scans( + tenant_id: str, + provider_id: str, + attack_paths_scan_id: str, +) -> list[ProwlerAPIAttackPathsScan]: + """ + An `old_attack_paths_scan` is any `completed` Attack Paths scan for the same 
provider,
+ with its graph database not deleted, excluding the current Attack Paths scan.
+ """
+
+ with rls_transaction(tenant_id):
+ completed_scans_qs = (
+ ProwlerAPIAttackPathsScan.objects.filter(
+ provider_id=provider_id,
+ state=StateChoices.COMPLETED,
+ is_graph_database_deleted=False,
+ )
+ .exclude(id=attack_paths_scan_id)
+ .all()
+ )
+
+ return list(completed_scans_qs)
+
+
+def update_old_attack_paths_scan(
+ old_attack_paths_scan: ProwlerAPIAttackPathsScan,
+) -> None:
+ with rls_transaction(old_attack_paths_scan.tenant_id):
+ old_attack_paths_scan.is_graph_database_deleted = True
+ old_attack_paths_scan.save(update_fields=["is_graph_database_deleted"])
+
+
+def get_provider_graph_database_names(tenant_id: str, provider_id: str) -> list[str]:
+ """
+ Return existing graph database names for a tenant/provider.
+
+ Note: For accessing the `AttackPathsScan` we need to use the `all_objects` manager because the provider is soft-deleted.
+ """
+ with rls_transaction(tenant_id):
+ graph_databases_names_qs = ProwlerAPIAttackPathsScan.all_objects.filter(
+ provider_id=provider_id,
+ is_graph_database_deleted=False,
+ ).values_list("graph_database", flat=True)
+
+ return list(graph_databases_names_qs)
diff --git a/api/src/backend/tasks/jobs/attack_paths/providers.py b/api/src/backend/tasks/jobs/attack_paths/providers.py
new file mode 100644
index 0000000000..a0d4c44551
--- /dev/null
+++ b/api/src/backend/tasks/jobs/attack_paths/providers.py
@@ -0,0 +1,23 @@
+AVAILABLE_PROVIDERS: list[str] = [
+ "aws",
+]
+
+ROOT_NODE_LABELS: dict[str, str] = {
+ "aws": "AWSAccount",
+}
+
+NODE_UID_FIELDS: dict[str, str] = {
+ "aws": "arn",
+}
+
+
+def is_provider_available(provider_type: str) -> bool:
+ return provider_type in AVAILABLE_PROVIDERS
+
+
+def get_root_node_label(provider_type: str) -> str:
+ return ROOT_NODE_LABELS.get(provider_type, "UnknownProviderAccount")
+
+
+def get_node_uid_field(provider_type: str) -> str:
+ return NODE_UID_FIELDS.get(provider_type, "UnknownProviderUID")
diff --git a/api/src/backend/tasks/jobs/attack_paths/prowler.py b/api/src/backend/tasks/jobs/attack_paths/prowler.py
new file mode 100644
index 0000000000..8678045811
--- /dev/null
+++ b/api/src/backend/tasks/jobs/attack_paths/prowler.py
@@ -0,0 +1,205 @@
+import neo4j
+
+from cartography.client.core.tx import run_write_query
+from cartography.config import Config as CartographyConfig
+from celery.utils.log import get_task_logger
+
+from api.db_utils import rls_transaction
+from api.models import Provider, ResourceFindingMapping
+from config.env import env
+from prowler.config import config as ProwlerConfig
+from tasks.jobs.attack_paths.providers import get_node_uid_field, get_root_node_label
+
+logger = get_task_logger(__name__)
+
+BATCH_SIZE = env.int("NEO4J_INSERT_BATCH_SIZE", 500)
+
+INDEX_STATEMENTS = [
+ "CREATE INDEX prowler_finding_id IF NOT EXISTS FOR (n:ProwlerFinding) ON (n.id);",
+ "CREATE INDEX prowler_finding_provider_uid IF NOT EXISTS FOR (n:ProwlerFinding) ON (n.provider_uid);",
+ "CREATE INDEX prowler_finding_lastupdated IF NOT EXISTS FOR (n:ProwlerFinding) ON (n.lastupdated);",
+ "CREATE INDEX prowler_finding_status IF NOT EXISTS FOR (n:ProwlerFinding) ON (n.status);",
+]
+
+INSERT_STATEMENT_TEMPLATE = """
+ UNWIND $findings_data AS finding_data
+
+ MATCH (account:__ROOT_NODE_LABEL__ {id: $provider_uid})
+ MATCH (account)-->(resource)
+ WHERE resource.__NODE_UID_FIELD__ = finding_data.resource_uid
+ OR resource.id = finding_data.resource_uid
+
+ MERGE (finding:ProwlerFinding {id: finding_data.id})
+ 
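+ // ON CREATE populates the full finding payload once; the ON MATCH branch below
+ // only refreshes the fields that can change between scans (status, status_extended, lastupdated).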
ON CREATE SET + finding.id = finding_data.id, + finding.uid = finding_data.uid, + finding.inserted_at = finding_data.inserted_at, + finding.updated_at = finding_data.updated_at, + finding.first_seen_at = finding_data.first_seen_at, + finding.scan_id = finding_data.scan_id, + finding.delta = finding_data.delta, + finding.status = finding_data.status, + finding.status_extended = finding_data.status_extended, + finding.severity = finding_data.severity, + finding.check_id = finding_data.check_id, + finding.check_title = finding_data.check_title, + finding.muted = finding_data.muted, + finding.muted_reason = finding_data.muted_reason, + finding.provider_uid = $provider_uid, + finding.firstseen = timestamp(), + finding.lastupdated = $last_updated, + finding._module_name = 'cartography:prowler', + finding._module_version = $prowler_version + ON MATCH SET + finding.status = finding_data.status, + finding.status_extended = finding_data.status_extended, + finding.lastupdated = $last_updated + + MERGE (resource)-[rel:HAS_FINDING]->(finding) + ON CREATE SET + rel.provider_uid = $provider_uid, + rel.firstseen = timestamp(), + rel.lastupdated = $last_updated, + rel._module_name = 'cartography:prowler', + rel._module_version = $prowler_version + ON MATCH SET + rel.lastupdated = $last_updated +""" + +CLEANUP_STATEMENT = """ + MATCH (finding:ProwlerFinding {provider_uid: $provider_uid}) + WHERE finding.lastupdated < $last_updated + + WITH finding LIMIT $batch_size + + DETACH DELETE finding + + RETURN COUNT(finding) AS deleted_findings_count +""" + + +def create_indexes(neo4j_session: neo4j.Session) -> None: + """ + Code based on Cartography version 0.122.0, specifically on `cartography.intel.create_indexes.run`. + """ + + logger.info("Creating indexes for Prowler node types.") + for statement in INDEX_STATEMENTS: + logger.debug("Executing statement: %s", statement) + run_write_query(neo4j_session, statement) + + +def analysis( + neo4j_session: neo4j.Session, + prowler_api_provider: Provider, + scan_id: str, + config: CartographyConfig, +) -> None: + findings_data = get_provider_last_scan_findings(prowler_api_provider, scan_id) + load_findings(neo4j_session, findings_data, prowler_api_provider, config) + cleanup_findings(neo4j_session, prowler_api_provider, config) + + +def get_provider_last_scan_findings( + prowler_api_provider: Provider, + scan_id: str, +) -> list[dict[str, str]]: + with rls_transaction(prowler_api_provider.tenant_id): + resource_finding_qs = ResourceFindingMapping.objects.filter( + finding__scan_id=scan_id, + ).values( + "resource__uid", + "finding__id", + "finding__uid", + "finding__inserted_at", + "finding__updated_at", + "finding__first_seen_at", + "finding__scan_id", + "finding__delta", + "finding__status", + "finding__status_extended", + "finding__severity", + "finding__check_id", + "finding__check_metadata__checktitle", + "finding__muted", + "finding__muted_reason", + ) + + findings = [] + for resource_finding in resource_finding_qs: + findings.append( + { + "resource_uid": str(resource_finding["resource__uid"]), + "id": str(resource_finding["finding__id"]), + "uid": resource_finding["finding__uid"], + "inserted_at": resource_finding["finding__inserted_at"], + "updated_at": resource_finding["finding__updated_at"], + "first_seen_at": resource_finding["finding__first_seen_at"], + "scan_id": str(resource_finding["finding__scan_id"]), + "delta": resource_finding["finding__delta"], + "status": resource_finding["finding__status"], + "status_extended": 
resource_finding["finding__status_extended"], + "severity": resource_finding["finding__severity"], + "check_id": str(resource_finding["finding__check_id"]), + "check_title": resource_finding[ + "finding__check_metadata__checktitle" + ], + "muted": resource_finding["finding__muted"], + "muted_reason": resource_finding["finding__muted_reason"], + } + ) + + return findings + + +def load_findings( + neo4j_session: neo4j.Session, + findings_data: list[dict[str, str]], + prowler_api_provider: Provider, + config: CartographyConfig, +) -> None: + replacements = { + "__ROOT_NODE_LABEL__": get_root_node_label(prowler_api_provider.provider), + "__NODE_UID_FIELD__": get_node_uid_field(prowler_api_provider.provider), + } + query = INSERT_STATEMENT_TEMPLATE + for replace_key, replace_value in replacements.items(): + query = query.replace(replace_key, replace_value) + + parameters = { + "provider_uid": str(prowler_api_provider.uid), + "last_updated": config.update_tag, + "prowler_version": ProwlerConfig.prowler_version, + } + + total_length = len(findings_data) + for i in range(0, total_length, BATCH_SIZE): + parameters["findings_data"] = findings_data[i : i + BATCH_SIZE] + + logger.info( + f"Loading findings batch {i // BATCH_SIZE + 1} / {(total_length + BATCH_SIZE - 1) // BATCH_SIZE}" + ) + + neo4j_session.run(query, parameters) + + +def cleanup_findings( + neo4j_session: neo4j.Session, + prowler_api_provider: Provider, + config: CartographyConfig, +) -> None: + parameters = { + "provider_uid": str(prowler_api_provider.uid), + "last_updated": config.update_tag, + "batch_size": BATCH_SIZE, + } + + batch = 1 + deleted_count = 1 + while deleted_count > 0: + logger.info(f"Cleaning findings batch {batch}") + + result = neo4j_session.run(CLEANUP_STATEMENT, parameters) + + deleted_count = result.single().get("deleted_findings_count", 0) + batch += 1 diff --git a/api/src/backend/tasks/jobs/attack_paths/scan.py b/api/src/backend/tasks/jobs/attack_paths/scan.py new file mode 100644 index 0000000000..d2cf275842 --- /dev/null +++ b/api/src/backend/tasks/jobs/attack_paths/scan.py @@ -0,0 +1,183 @@ +import logging +import time +import asyncio + +from typing import Any, Callable + +from cartography.config import Config as CartographyConfig +from cartography.intel import analysis as cartography_analysis +from cartography.intel import create_indexes as cartography_create_indexes +from cartography.intel import ontology as cartography_ontology +from celery.utils.log import get_task_logger + +from api.attack_paths import database as graph_database +from api.db_utils import rls_transaction +from api.models import ( + Provider as ProwlerAPIProvider, + StateChoices, +) +from api.utils import initialize_prowler_provider +from tasks.jobs.attack_paths import aws, db_utils, prowler, utils + +# Without this Celery goes crazy with Cartography logging +logging.getLogger("cartography").setLevel(logging.ERROR) +logging.getLogger("neo4j").propagate = False + +logger = get_task_logger(__name__) + +CARTOGRAPHY_INGESTION_FUNCTIONS: dict[str, Callable] = { + "aws": aws.start_aws_ingestion, +} + + +def get_cartography_ingestion_function(provider_type: str) -> Callable | None: + return CARTOGRAPHY_INGESTION_FUNCTIONS.get(provider_type) + + +def run(tenant_id: str, scan_id: str, task_id: str) -> dict[str, Any]: + """ + Code based on Cartography version 0.122.0, specifically on `cartography.cli.main`, `cartography.cli.CLI.main`, + `cartography.sync.run_with_config` and `cartography.sync.Sync.run`. 
+ """ + ingestion_exceptions = {} # This will hold any exceptions raised during ingestion + + # Prowler necessary objects + with rls_transaction(tenant_id): + prowler_api_provider = ProwlerAPIProvider.objects.get(scan__pk=scan_id) + prowler_sdk_provider = initialize_prowler_provider(prowler_api_provider) + + # Attack Paths Scan necessary objects + cartography_ingestion_function = get_cartography_ingestion_function( + prowler_api_provider.provider + ) + attack_paths_scan = db_utils.retrieve_attack_paths_scan(tenant_id, scan_id) + + # Checks before starting the scan + if not cartography_ingestion_function: + ingestion_exceptions = { + "global_error": f"Provider {prowler_api_provider.provider} is not supported for Attack Paths scans" + } + if attack_paths_scan: + db_utils.finish_attack_paths_scan( + attack_paths_scan, StateChoices.COMPLETED, ingestion_exceptions + ) + + logger.warning( + f"Provider {prowler_api_provider.provider} is not supported for Attack Paths scans" + ) + return ingestion_exceptions + + else: + if not attack_paths_scan: + logger.warning( + f"No Attack Paths Scan found for scan {scan_id} and tenant {tenant_id}, let's create it then" + ) + attack_paths_scan = db_utils.create_attack_paths_scan( + tenant_id, scan_id, prowler_api_provider.id + ) + + # While creating the Cartography configuration, attributes `neo4j_user` and `neo4j_password` are not really needed in this config object + cartography_config = CartographyConfig( + neo4j_uri=graph_database.get_uri(), + neo4j_database=graph_database.get_database_name(attack_paths_scan.id), + update_tag=int(time.time()), + ) + + # Starting the Attack Paths scan + db_utils.starting_attack_paths_scan(attack_paths_scan, task_id, cartography_config) + + try: + logger.info( + f"Creating Neo4j database {cartography_config.neo4j_database} for tenant {prowler_api_provider.tenant_id}" + ) + + graph_database.create_database(cartography_config.neo4j_database) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 1) + + logger.info( + f"Starting Cartography ({attack_paths_scan.id}) for " + f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}" + ) + with graph_database.get_session( + cartography_config.neo4j_database + ) as neo4j_session: + # Indexes creation + cartography_create_indexes.run(neo4j_session, cartography_config) + prowler.create_indexes(neo4j_session) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 2) + + # The real scan, where iterates over cloud services + ingestion_exceptions = _call_within_event_loop( + cartography_ingestion_function, + neo4j_session, + cartography_config, + prowler_api_provider, + prowler_sdk_provider, + attack_paths_scan, + ) + + # Post-processing: Just keeping it to be more Cartography compliant + cartography_ontology.run(neo4j_session, cartography_config) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 95) + + cartography_analysis.run(neo4j_session, cartography_config) + db_utils.update_attack_paths_scan_progress(attack_paths_scan, 96) + + # Adding Prowler nodes and relationships + prowler.analysis( + neo4j_session, prowler_api_provider, scan_id, cartography_config + ) + + logger.info( + f"Completed Cartography ({attack_paths_scan.id}) for " + f"{prowler_api_provider.provider.upper()} provider {prowler_api_provider.id}" + ) + + # Handling databases changes + old_attack_paths_scans = db_utils.get_old_attack_paths_scans( + prowler_api_provider.tenant_id, + prowler_api_provider.id, + attack_paths_scan.id, + ) + for old_attack_paths_scan in 
old_attack_paths_scans: + graph_database.drop_database(old_attack_paths_scan.graph_database) + db_utils.update_old_attack_paths_scan(old_attack_paths_scan) + + db_utils.finish_attack_paths_scan( + attack_paths_scan, StateChoices.COMPLETED, ingestion_exceptions + ) + return ingestion_exceptions + + except Exception as e: + exception_message = utils.stringify_exception(e, "Cartography failed") + logger.error(exception_message) + ingestion_exceptions["global_cartography_error"] = exception_message + + # Handling databases changes + graph_database.drop_database(cartography_config.neo4j_database) + db_utils.finish_attack_paths_scan( + attack_paths_scan, StateChoices.FAILED, ingestion_exceptions + ) + raise + + +def _call_within_event_loop(fn, *args, **kwargs): + """ + Cartography needs a running event loop, so assuming there is none (Celery task or even regular DRF endpoint), + let's create a new one and set it as the current event loop for this thread. + """ + + loop = asyncio.new_event_loop() + try: + asyncio.set_event_loop(loop) + return fn(*args, **kwargs) + + finally: + try: + loop.run_until_complete(loop.shutdown_asyncgens()) + + except Exception as e: + logger.warning(f"Failed to shutdown async generators cleanly: {e}") + + loop.close() + asyncio.set_event_loop(None) diff --git a/api/src/backend/tasks/jobs/attack_paths/utils.py b/api/src/backend/tasks/jobs/attack_paths/utils.py new file mode 100644 index 0000000000..0c737d4158 --- /dev/null +++ b/api/src/backend/tasks/jobs/attack_paths/utils.py @@ -0,0 +1,10 @@ +import traceback + +from datetime import datetime, timezone + + +def stringify_exception(exception: Exception, context: str) -> str: + timestamp = datetime.now(tz=timezone.utc) + exception_traceback = traceback.TracebackException.from_exception(exception) + traceback_string = "".join(exception_traceback.format()) + return f"{timestamp} - {context}\n{traceback_string}" diff --git a/api/src/backend/tasks/jobs/deletion.py b/api/src/backend/tasks/jobs/deletion.py index d72b8de40e..6eee63de6a 100644 --- a/api/src/backend/tasks/jobs/deletion.py +++ b/api/src/backend/tasks/jobs/deletion.py @@ -1,9 +1,19 @@ from celery.utils.log import get_task_logger from django.db import DatabaseError +from api.attack_paths import database as graph_database from api.db_router import MainRouter from api.db_utils import batch_delete, rls_transaction -from api.models import Finding, Provider, Resource, Scan, ScanSummary, Tenant +from api.models import ( + AttackPathsScan, + Finding, + Provider, + Resource, + Scan, + ScanSummary, + Tenant, +) +from tasks.jobs.attack_paths.db_utils import get_provider_graph_database_names logger = get_task_logger(__name__) @@ -23,16 +33,27 @@ def delete_provider(tenant_id: str, pk: str): Raises: Provider.DoesNotExist: If no instance with the provided primary key exists. 
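+ Note: the provider's Attack Paths graph databases are dropped before the relational data is deleted.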
""" + # Delete the Attack Paths' graph databases related to the provider + graph_database_names = get_provider_graph_database_names(tenant_id, pk) + try: + for graph_database_name in graph_database_names: + graph_database.drop_database(graph_database_name) + except graph_database.GraphDatabaseQueryException as gdb_error: + logger.error(f"Error deleting Provider databases: {gdb_error}") + raise + + # Get all provider related data and delete them in batches with rls_transaction(tenant_id): instance = Provider.all_objects.get(pk=pk) - deletion_summary = {} deletion_steps = [ ("Scan Summaries", ScanSummary.all_objects.filter(scan__provider=instance)), ("Findings", Finding.all_objects.filter(scan__provider=instance)), ("Resources", Resource.all_objects.filter(provider=instance)), ("Scans", Scan.all_objects.filter(provider=instance)), + ("AttackPathsScans", AttackPathsScan.all_objects.filter(provider=instance)), ] + deletion_summary = {} for step_name, queryset in deletion_steps: try: _, step_summary = batch_delete(tenant_id, queryset) @@ -48,6 +69,7 @@ def delete_provider(tenant_id: str, pk: str): except DatabaseError as db_error: logger.error(f"Error deleting Provider: {db_error}") raise + return deletion_summary diff --git a/api/src/backend/tasks/tasks.py b/api/src/backend/tasks/tasks.py index 91e1ac85ea..f900126db6 100644 --- a/api/src/backend/tasks/tasks.py +++ b/api/src/backend/tasks/tasks.py @@ -1,13 +1,29 @@ import os + from datetime import datetime, timedelta, timezone from pathlib import Path from shutil import rmtree from celery import chain, group, shared_task from celery.utils.log import get_task_logger +from django_celery_beat.models import PeriodicTask + +from api.compliance import get_compliance_frameworks +from api.db_router import READ_REPLICA_ALIAS +from api.db_utils import rls_transaction +from api.decorators import handle_provider_deletion, set_tenant +from api.models import Finding, Integration, Provider, Scan, ScanSummary, StateChoices +from api.utils import initialize_prowler_provider +from api.v1.serializers import ScanTaskSerializer from config.celery import RLSTask from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIRECTORY -from django_celery_beat.models import PeriodicTask +from prowler.lib.check.compliance_models import Compliance +from prowler.lib.outputs.compliance.generic.generic import GenericCompliance +from prowler.lib.outputs.finding import Finding as FindingOutput +from tasks.jobs.attack_paths import ( + attack_paths_scan, + can_provider_run_attack_paths_scan, +) from tasks.jobs.backfill import ( backfill_compliance_summaries, backfill_daily_severity_summaries, @@ -50,17 +66,6 @@ from tasks.jobs.scan import ( ) from tasks.utils import batched, get_next_execution_datetime -from api.compliance import get_compliance_frameworks -from api.db_router import READ_REPLICA_ALIAS -from api.db_utils import rls_transaction -from api.decorators import handle_provider_deletion, set_tenant -from api.models import Finding, Integration, Provider, Scan, ScanSummary, StateChoices -from api.utils import initialize_prowler_provider -from api.v1.serializers import ScanTaskSerializer -from prowler.lib.check.compliance_models import Compliance -from prowler.lib.outputs.compliance.generic.generic import GenericCompliance -from prowler.lib.outputs.finding import Finding as FindingOutput - logger = get_task_logger(__name__) @@ -153,6 +158,11 @@ def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str) ), ).apply_async() + if 
can_provider_run_attack_paths_scan(tenant_id, provider_id): + perform_attack_paths_scan_task.apply_async( + kwargs={"tenant_id": tenant_id, "scan_id": scan_id} + ) + @shared_task(base=RLSTask, name="provider-connection-check") @set_tenant @@ -358,6 +368,25 @@ def perform_scan_summary_task(tenant_id: str, scan_id: str): return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id) +# TODO: This task must be queued at the `attack-paths` queue, don't forget to add it to the `docker-entrypoint.sh` file +@shared_task(base=RLSTask, bind=True, name="attack-paths-scan-perform", queue="scans") +def perform_attack_paths_scan_task(self, tenant_id: str, scan_id: str): + """ + Execute an Attack Paths scan for the given provider within the current tenant RLS context. + + Args: + self: The task instance (automatically passed when bind=True). + tenant_id (str): The tenant identifier for RLS context. + scan_id (str): The Prowler scan identifier for obtaining the tenant and provider context. + + Returns: + Any: The result from `attack_paths_scan`, including any per-scan failure details. + """ + return attack_paths_scan( + tenant_id=tenant_id, scan_id=scan_id, task_id=self.request.id + ) + + @shared_task(name="tenant-deletion", queue="deletion", autoretry_for=(Exception,)) def delete_tenant_task(tenant_id: str): return delete_tenant(pk=tenant_id) diff --git a/api/src/backend/tasks/tests/test_attack_paths_scan.py b/api/src/backend/tasks/tests/test_attack_paths_scan.py new file mode 100644 index 0000000000..c94e8c83b6 --- /dev/null +++ b/api/src/backend/tasks/tests/test_attack_paths_scan.py @@ -0,0 +1,416 @@ +from contextlib import nullcontext +from types import SimpleNamespace +from unittest.mock import MagicMock, call, patch + +import pytest + +from api.models import ( + AttackPathsScan, + Finding, + Provider, + Resource, + ResourceFindingMapping, + Scan, + StateChoices, + StatusChoices, +) +from prowler.lib.check.models import Severity +from tasks.jobs.attack_paths import prowler as prowler_module +from tasks.jobs.attack_paths.scan import run as attack_paths_run + + +@pytest.mark.django_db +class TestAttackPathsRun: + def test_run_success_flow(self, tenants_fixture, providers_fixture, scans_fixture): + tenant = tenants_fixture[0] + provider = providers_fixture[0] + provider.provider = Provider.ProviderChoices.AWS + provider.save() + scan = scans_fixture[0] + scan.provider = provider + scan.save() + + attack_paths_scan = AttackPathsScan.objects.create( + tenant_id=tenant.id, + provider=provider, + scan=scan, + state=StateChoices.SCHEDULED, + ) + + mock_session = MagicMock() + session_ctx = MagicMock() + session_ctx.__enter__.return_value = mock_session + session_ctx.__exit__.return_value = False + ingestion_result = {"organizations": "warning"} + ingestion_fn = MagicMock(return_value=ingestion_result) + + with ( + patch( + "tasks.jobs.attack_paths.scan.rls_transaction", + new=lambda *args, **kwargs: nullcontext(), + ), + patch( + "tasks.jobs.attack_paths.scan.initialize_prowler_provider", + return_value=MagicMock(_enabled_regions=["us-east-1"]), + ), + patch( + "tasks.jobs.attack_paths.scan.graph_database.get_uri", + return_value="bolt://neo4j", + ), + patch( + "tasks.jobs.attack_paths.scan.graph_database.get_database_name", + return_value="db-scan-id", + ) as mock_get_db_name, + patch( + "tasks.jobs.attack_paths.scan.graph_database.create_database" + ) as mock_create_db, + patch( + "tasks.jobs.attack_paths.scan.graph_database.get_session", + return_value=session_ctx, + ) as mock_get_session, + patch( + 
"tasks.jobs.attack_paths.scan.cartography_create_indexes.run" + ) as mock_cartography_indexes, + patch( + "tasks.jobs.attack_paths.scan.cartography_analysis.run" + ) as mock_cartography_analysis, + patch( + "tasks.jobs.attack_paths.scan.cartography_ontology.run" + ) as mock_cartography_ontology, + patch( + "tasks.jobs.attack_paths.scan.prowler.create_indexes" + ) as mock_prowler_indexes, + patch( + "tasks.jobs.attack_paths.scan.prowler.analysis" + ) as mock_prowler_analysis, + patch( + "tasks.jobs.attack_paths.scan.db_utils.retrieve_attack_paths_scan", + return_value=attack_paths_scan, + ) as mock_retrieve_scan, + patch( + "tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan" + ) as mock_starting, + patch( + "tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress" + ) as mock_update_progress, + patch( + "tasks.jobs.attack_paths.scan.db_utils.finish_attack_paths_scan" + ) as mock_finish, + patch( + "tasks.jobs.attack_paths.scan.get_cartography_ingestion_function", + return_value=ingestion_fn, + ) as mock_get_ingestion, + patch( + "tasks.jobs.attack_paths.scan._call_within_event_loop", + side_effect=lambda fn, *a, **kw: fn(*a, **kw), + ) as mock_event_loop, + ): + result = attack_paths_run(str(tenant.id), str(scan.id), "task-123") + + assert result == ingestion_result + mock_retrieve_scan.assert_called_once_with(str(tenant.id), str(scan.id)) + mock_starting.assert_called_once() + config = mock_starting.call_args[0][2] + assert config.neo4j_database == "db-scan-id" + + mock_create_db.assert_called_once_with("db-scan-id") + mock_get_session.assert_called_once_with("db-scan-id") + mock_cartography_indexes.assert_called_once_with(mock_session, config) + mock_prowler_indexes.assert_called_once_with(mock_session) + mock_cartography_analysis.assert_called_once_with(mock_session, config) + mock_cartography_ontology.assert_called_once_with(mock_session, config) + mock_prowler_analysis.assert_called_once_with( + mock_session, + provider, + str(scan.id), + config, + ) + mock_get_ingestion.assert_called_once_with(provider.provider) + mock_event_loop.assert_called_once() + mock_update_progress.assert_any_call(attack_paths_scan, 1) + mock_update_progress.assert_any_call(attack_paths_scan, 2) + mock_update_progress.assert_any_call(attack_paths_scan, 95) + mock_finish.assert_called_once_with( + attack_paths_scan, StateChoices.COMPLETED, ingestion_result + ) + mock_get_db_name.assert_called_once_with(attack_paths_scan.id) + + def test_run_failure_marks_scan_failed( + self, tenants_fixture, providers_fixture, scans_fixture + ): + tenant = tenants_fixture[0] + provider = providers_fixture[0] + provider.provider = Provider.ProviderChoices.AWS + provider.save() + scan = scans_fixture[0] + scan.provider = provider + scan.save() + + attack_paths_scan = AttackPathsScan.objects.create( + tenant_id=tenant.id, + provider=provider, + scan=scan, + state=StateChoices.SCHEDULED, + ) + + mock_session = MagicMock() + session_ctx = MagicMock() + session_ctx.__enter__.return_value = mock_session + session_ctx.__exit__.return_value = False + ingestion_fn = MagicMock(side_effect=RuntimeError("ingestion boom")) + + with ( + patch( + "tasks.jobs.attack_paths.scan.rls_transaction", + new=lambda *args, **kwargs: nullcontext(), + ), + patch( + "tasks.jobs.attack_paths.scan.initialize_prowler_provider", + return_value=MagicMock(_enabled_regions=["us-east-1"]), + ), + patch("tasks.jobs.attack_paths.scan.graph_database.get_uri"), + patch( + "tasks.jobs.attack_paths.scan.graph_database.get_database_name", + 
return_value="db-scan-id", + ), + patch("tasks.jobs.attack_paths.scan.graph_database.create_database"), + patch( + "tasks.jobs.attack_paths.scan.graph_database.get_session", + return_value=session_ctx, + ), + patch("tasks.jobs.attack_paths.scan.cartography_create_indexes.run"), + patch("tasks.jobs.attack_paths.scan.cartography_analysis.run"), + patch("tasks.jobs.attack_paths.scan.prowler.create_indexes"), + patch("tasks.jobs.attack_paths.scan.prowler.analysis"), + patch( + "tasks.jobs.attack_paths.scan.db_utils.retrieve_attack_paths_scan", + return_value=attack_paths_scan, + ), + patch("tasks.jobs.attack_paths.scan.db_utils.starting_attack_paths_scan"), + patch( + "tasks.jobs.attack_paths.scan.db_utils.update_attack_paths_scan_progress" + ), + patch( + "tasks.jobs.attack_paths.scan.db_utils.finish_attack_paths_scan" + ) as mock_finish, + patch( + "tasks.jobs.attack_paths.scan.get_cartography_ingestion_function", + return_value=ingestion_fn, + ), + patch( + "tasks.jobs.attack_paths.scan._call_within_event_loop", + side_effect=lambda fn, *a, **kw: fn(*a, **kw), + ), + patch( + "tasks.jobs.attack_paths.scan.utils.stringify_exception", + return_value="Cartography failed: ingestion boom", + ), + ): + with pytest.raises(RuntimeError, match="ingestion boom"): + attack_paths_run(str(tenant.id), str(scan.id), "task-456") + + failure_args = mock_finish.call_args[0] + assert failure_args[0] is attack_paths_scan + assert failure_args[1] == StateChoices.FAILED + assert failure_args[2] == { + "global_cartography_error": "Cartography failed: ingestion boom" + } + + def test_run_returns_early_for_unsupported_provider(self, tenants_fixture): + tenant = tenants_fixture[0] + provider = Provider.objects.create( + provider=Provider.ProviderChoices.GCP, + uid="gcp-account", + alias="gcp", + tenant_id=tenant.id, + ) + scan = Scan.objects.create( + name="GCP Scan", + provider=provider, + trigger=Scan.TriggerChoices.MANUAL, + state=StateChoices.AVAILABLE, + tenant_id=tenant.id, + ) + + with ( + patch( + "tasks.jobs.attack_paths.scan.rls_transaction", + new=lambda *args, **kwargs: nullcontext(), + ), + patch( + "tasks.jobs.attack_paths.scan.initialize_prowler_provider", + return_value=MagicMock(), + ), + patch( + "tasks.jobs.attack_paths.scan.get_cartography_ingestion_function", + return_value=None, + ) as mock_get_ingestion, + patch( + "tasks.jobs.attack_paths.scan.db_utils.retrieve_attack_paths_scan" + ) as mock_retrieve, + ): + mock_retrieve.return_value = None + result = attack_paths_run(str(tenant.id), str(scan.id), "task-789") + + assert result == { + "global_error": "Provider gcp is not supported for Attack Paths scans" + } + mock_get_ingestion.assert_called_once_with(provider.provider) + mock_retrieve.assert_called_once_with(str(tenant.id), str(scan.id)) + + +@pytest.mark.django_db +class TestAttackPathsProwlerHelpers: + def test_create_indexes_executes_all_statements(self): + mock_session = MagicMock() + with patch("tasks.jobs.attack_paths.prowler.run_write_query") as mock_run_write: + prowler_module.create_indexes(mock_session) + + assert mock_run_write.call_count == len(prowler_module.INDEX_STATEMENTS) + mock_run_write.assert_has_calls( + [call(mock_session, stmt) for stmt in prowler_module.INDEX_STATEMENTS] + ) + + def test_load_findings_batches_requests(self, providers_fixture): + provider = providers_fixture[0] + provider.provider = Provider.ProviderChoices.AWS + provider.save() + + findings = [ + {"id": "1", "resource_uid": "r-1"}, + {"id": "2", "resource_uid": "r-2"}, + ] + config = 
SimpleNamespace(update_tag=12345) + mock_session = MagicMock() + + with ( + patch.object(prowler_module, "BATCH_SIZE", 1), + patch( + "tasks.jobs.attack_paths.prowler.get_root_node_label", + return_value="AWSAccount", + ), + patch( + "tasks.jobs.attack_paths.prowler.get_node_uid_field", + return_value="arn", + ), + ): + prowler_module.load_findings(mock_session, findings, provider, config) + + assert mock_session.run.call_count == 2 + for call_args in mock_session.run.call_args_list: + params = call_args.args[1] + assert params["provider_uid"] == str(provider.uid) + assert params["last_updated"] == config.update_tag + assert "findings_data" in params + + def test_cleanup_findings_runs_batches(self, providers_fixture): + provider = providers_fixture[0] + config = SimpleNamespace(update_tag=1024) + mock_session = MagicMock() + + first_batch = MagicMock() + first_batch.single.return_value = {"deleted_findings_count": 3} + second_batch = MagicMock() + second_batch.single.return_value = {"deleted_findings_count": 0} + mock_session.run.side_effect = [first_batch, second_batch] + + prowler_module.cleanup_findings(mock_session, provider, config) + + assert mock_session.run.call_count == 2 + params = mock_session.run.call_args.args[1] + assert params["provider_uid"] == str(provider.uid) + assert params["last_updated"] == config.update_tag + + def test_get_provider_last_scan_findings_returns_latest_scan_data( + self, + tenants_fixture, + providers_fixture, + ): + tenant = tenants_fixture[0] + provider = providers_fixture[0] + provider.provider = Provider.ProviderChoices.AWS + provider.save() + + resource = Resource.objects.create( + tenant_id=tenant.id, + provider=provider, + uid="resource-uid", + name="Resource", + region="us-east-1", + service="ec2", + type="instance", + ) + + older_scan = Scan.objects.create( + name="Older", + provider=provider, + trigger=Scan.TriggerChoices.MANUAL, + state=StateChoices.COMPLETED, + tenant_id=tenant.id, + ) + old_finding = Finding.objects.create( + tenant_id=tenant.id, + uid="older-finding", + scan=older_scan, + delta=Finding.DeltaChoices.NEW, + status=StatusChoices.PASS, + status_extended="ok", + severity=Severity.low, + impact=Severity.low, + impact_extended="", + raw_result={}, + check_id="check-old", + check_metadata={"checktitle": "Old"}, + first_seen_at=older_scan.inserted_at, + ) + ResourceFindingMapping.objects.create( + tenant_id=tenant.id, + resource=resource, + finding=old_finding, + ) + + latest_scan = Scan.objects.create( + name="Latest", + provider=provider, + trigger=Scan.TriggerChoices.MANUAL, + state=StateChoices.COMPLETED, + tenant_id=tenant.id, + ) + finding = Finding.objects.create( + tenant_id=tenant.id, + uid="finding-uid", + scan=latest_scan, + delta=Finding.DeltaChoices.NEW, + status=StatusChoices.FAIL, + status_extended="failed", + severity=Severity.high, + impact=Severity.high, + impact_extended="", + raw_result={}, + check_id="check-1", + check_metadata={"checktitle": "Check title"}, + first_seen_at=latest_scan.inserted_at, + ) + ResourceFindingMapping.objects.create( + tenant_id=tenant.id, + resource=resource, + finding=finding, + ) + + latest_scan.refresh_from_db() + + with patch( + "tasks.jobs.attack_paths.prowler.rls_transaction", + new=lambda *args, **kwargs: nullcontext(), + ): + findings_data = prowler_module.get_provider_last_scan_findings( + provider, + str(latest_scan.id), + ) + + assert len(findings_data) == 1 + finding_dict = findings_data[0] + assert finding_dict["id"] == str(finding.id) + assert 
finding_dict["resource_uid"] == resource.uid + assert finding_dict["check_title"] == "Check title" + assert finding_dict["scan_id"] == str(latest_scan.id) diff --git a/api/src/backend/tasks/tests/test_connection.py b/api/src/backend/tasks/tests/test_connection.py index 30973f98bf..e5e39d8778 100644 --- a/api/src/backend/tasks/tests/test_connection.py +++ b/api/src/backend/tasks/tests/test_connection.py @@ -82,7 +82,7 @@ def test_check_provider_connection_exception( [ { "name": "OpenAI", - "api_key_decoded": "sk-test1234567890T3BlbkFJtest1234567890", + "api_key_decoded": "sk-fake-test-key-for-unit-testing-only", "model": "gpt-4o", "temperature": 0, "max_tokens": 4000, diff --git a/api/src/backend/tasks/tests/test_deletion.py b/api/src/backend/tasks/tests/test_deletion.py index 81cdb44daa..fc90bee0e3 100644 --- a/api/src/backend/tasks/tests/test_deletion.py +++ b/api/src/backend/tasks/tests/test_deletion.py @@ -1,27 +1,60 @@ +from unittest.mock import call, patch + import pytest + from django.core.exceptions import ObjectDoesNotExist -from tasks.jobs.deletion import delete_provider, delete_tenant from api.models import Provider, Tenant +from tasks.jobs.deletion import delete_provider, delete_tenant @pytest.mark.django_db class TestDeleteProvider: def test_delete_provider_success(self, providers_fixture): - instance = providers_fixture[0] - tenant_id = str(instance.tenant_id) - result = delete_provider(tenant_id, instance.id) + with patch( + "tasks.jobs.deletion.get_provider_graph_database_names" + ) as mock_get_provider_graph_database_names, patch( + "tasks.jobs.deletion.graph_database.drop_database" + ) as mock_drop_database: + graph_db_names = ["graph-db-1", "graph-db-2"] + mock_get_provider_graph_database_names.return_value = graph_db_names - assert result - with pytest.raises(ObjectDoesNotExist): - Provider.objects.get(pk=instance.id) + instance = providers_fixture[0] + tenant_id = str(instance.tenant_id) + result = delete_provider(tenant_id, instance.id) + + assert result + with pytest.raises(ObjectDoesNotExist): + Provider.objects.get(pk=instance.id) + + mock_get_provider_graph_database_names.assert_called_once_with( + tenant_id, instance.id + ) + mock_drop_database.assert_has_calls( + [call(graph_db_name) for graph_db_name in graph_db_names] + ) def test_delete_provider_does_not_exist(self, tenants_fixture): - tenant_id = str(tenants_fixture[0].id) - non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645" + with patch( + "tasks.jobs.deletion.get_provider_graph_database_names" + ) as mock_get_provider_graph_database_names, patch( + "tasks.jobs.deletion.graph_database.drop_database" + ) as mock_drop_database: + graph_db_names = ["graph-db-1"] + mock_get_provider_graph_database_names.return_value = graph_db_names - with pytest.raises(ObjectDoesNotExist): - delete_provider(tenant_id, non_existent_pk) + tenant_id = str(tenants_fixture[0].id) + non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645" + + with pytest.raises(ObjectDoesNotExist): + delete_provider(tenant_id, non_existent_pk) + + mock_get_provider_graph_database_names.assert_called_once_with( + tenant_id, non_existent_pk + ) + mock_drop_database.assert_has_calls( + [call(graph_db_name) for graph_db_name in graph_db_names] + ) @pytest.mark.django_db @@ -30,33 +63,68 @@ class TestDeleteTenant: """ Test successful deletion of a tenant and its related data. 
""" - tenant = tenants_fixture[0] - providers = Provider.objects.filter(tenant_id=tenant.id) + with patch( + "tasks.jobs.deletion.get_provider_graph_database_names" + ) as mock_get_provider_graph_database_names, patch( + "tasks.jobs.deletion.graph_database.drop_database" + ) as mock_drop_database: + tenant = tenants_fixture[0] + providers = list(Provider.objects.filter(tenant_id=tenant.id)) - # Ensure the tenant and related providers exist before deletion - assert Tenant.objects.filter(id=tenant.id).exists() - assert providers.exists() + graph_db_names_per_provider = [ + [f"graph-db-{provider.id}"] for provider in providers + ] + mock_get_provider_graph_database_names.side_effect = ( + graph_db_names_per_provider + ) - # Call the function and validate the result - deletion_summary = delete_tenant(tenant.id) + # Ensure the tenant and related providers exist before deletion + assert Tenant.objects.filter(id=tenant.id).exists() + assert providers - assert deletion_summary is not None - assert not Tenant.objects.filter(id=tenant.id).exists() - assert not Provider.objects.filter(tenant_id=tenant.id).exists() + # Call the function and validate the result + deletion_summary = delete_tenant(tenant.id) + + assert deletion_summary is not None + assert not Tenant.objects.filter(id=tenant.id).exists() + assert not Provider.objects.filter(tenant_id=tenant.id).exists() + + expected_calls = [ + call(provider.tenant_id, provider.id) for provider in providers + ] + mock_get_provider_graph_database_names.assert_has_calls( + expected_calls, any_order=True + ) + assert mock_get_provider_graph_database_names.call_count == len( + expected_calls + ) + expected_drop_calls = [ + call(graph_db_name[0]) for graph_db_name in graph_db_names_per_provider + ] + mock_drop_database.assert_has_calls(expected_drop_calls, any_order=True) + assert mock_drop_database.call_count == len(expected_drop_calls) def test_delete_tenant_with_no_providers(self, tenants_fixture): """ Test deletion of a tenant with no related providers. 
""" - tenant = tenants_fixture[1] # Assume this tenant has no providers - providers = Provider.objects.filter(tenant_id=tenant.id) + with patch( + "tasks.jobs.deletion.get_provider_graph_database_names" + ) as mock_get_provider_graph_database_names, patch( + "tasks.jobs.deletion.graph_database.drop_database" + ) as mock_drop_database: + tenant = tenants_fixture[1] # Assume this tenant has no providers + providers = Provider.objects.filter(tenant_id=tenant.id) - # Ensure the tenant exists but has no related providers - assert Tenant.objects.filter(id=tenant.id).exists() - assert not providers.exists() + # Ensure the tenant exists but has no related providers + assert Tenant.objects.filter(id=tenant.id).exists() + assert not providers.exists() - # Call the function and validate the result - deletion_summary = delete_tenant(tenant.id) + # Call the function and validate the result + deletion_summary = delete_tenant(tenant.id) - assert deletion_summary == {} # No providers, so empty summary - assert not Tenant.objects.filter(id=tenant.id).exists() + assert deletion_summary == {} # No providers, so empty summary + assert not Tenant.objects.filter(id=tenant.id).exists() + + mock_get_provider_graph_database_names.assert_not_called() + mock_drop_database.assert_not_called() diff --git a/api/src/backend/tasks/tests/test_tasks.py b/api/src/backend/tasks/tests/test_tasks.py index 376315d90d..3d44cd95bb 100644 --- a/api/src/backend/tasks/tests/test_tasks.py +++ b/api/src/backend/tasks/tests/test_tasks.py @@ -1,10 +1,21 @@ import uuid + +from contextlib import contextmanager from unittest.mock import MagicMock, patch import openai import pytest + from botocore.exceptions import ClientError from django_celery_beat.models import IntervalSchedule, PeriodicTask + +from api.models import ( + Integration, + LighthouseProviderConfiguration, + LighthouseProviderModels, + Scan, + StateChoices, +) from tasks.jobs.lighthouse_providers import ( _create_bedrock_client, _extract_bedrock_credentials, @@ -15,19 +26,12 @@ from tasks.tasks import ( check_integrations_task, check_lighthouse_provider_connection_task, generate_outputs_task, + perform_attack_paths_scan_task, refresh_lighthouse_provider_models_task, s3_integration_task, security_hub_integration_task, ) -from api.models import ( - Integration, - LighthouseProviderConfiguration, - LighthouseProviderModels, - Scan, - StateChoices, -) - @pytest.mark.django_db class TestExtractBedrockCredentials: @@ -737,8 +741,12 @@ class TestScanCompleteTasks: @patch("tasks.tasks.generate_outputs_task.si") @patch("tasks.tasks.generate_compliance_reports_task.si") @patch("tasks.tasks.check_integrations_task.si") + @patch("tasks.tasks.perform_attack_paths_scan_task.apply_async") + @patch("tasks.tasks.can_provider_run_attack_paths_scan", return_value=False) def test_scan_complete_tasks( self, + mock_can_run_attack_paths, + mock_attack_paths_task, mock_check_integrations_task, mock_compliance_reports_task, mock_outputs_task, @@ -793,6 +801,67 @@ class TestScanCompleteTasks: scan_id="scan-id", ) + # Attack Paths task should be skipped when provider cannot run it + mock_attack_paths_task.assert_not_called() + + +class TestAttackPathsTasks: + @staticmethod + @contextmanager + def _override_task_request(task, **attrs): + request = task.request + sentinel = object() + previous = {key: getattr(request, key, sentinel) for key in attrs} + for key, value in attrs.items(): + setattr(request, key, value) + + try: + yield + finally: + for key, prev in previous.items(): + if prev is sentinel: + 
if hasattr(request, key):
+ delattr(request, key)
+ else:
+ setattr(request, key, prev)
+
+ def test_perform_attack_paths_scan_task_calls_runner(self):
+ with (
+ patch("tasks.tasks.attack_paths_scan") as mock_attack_paths_scan,
+ self._override_task_request(
+ perform_attack_paths_scan_task, id="celery-task-id"
+ ),
+ ):
+ mock_attack_paths_scan.return_value = {"status": "ok"}
+
+ result = perform_attack_paths_scan_task.run(
+ tenant_id="tenant-id", scan_id="scan-id"
+ )
+
+ mock_attack_paths_scan.assert_called_once_with(
+ tenant_id="tenant-id", scan_id="scan-id", task_id="celery-task-id"
+ )
+ assert result == {"status": "ok"}
+
+ def test_perform_attack_paths_scan_task_propagates_exception(self):
+ with (
+ patch(
+ "tasks.tasks.attack_paths_scan",
+ side_effect=RuntimeError("Exception to propagate"),
+ ) as mock_attack_paths_scan,
+ self._override_task_request(
+ perform_attack_paths_scan_task, id="celery-task-error"
+ ),
+ ):
+ with pytest.raises(RuntimeError, match="Exception to propagate"):
+ perform_attack_paths_scan_task.run(
+ tenant_id="tenant-id", scan_id="scan-id"
+ )
+
+ mock_attack_paths_scan.assert_called_once_with(
+ tenant_id="tenant-id", scan_id="scan-id", task_id="celery-task-error"
+ )
+
 @pytest.mark.django_db
 class TestCheckIntegrationsTask:
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 746948cc3a..90ab1b6390 100644
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -1,6 +1,7 @@
 services:
   api-dev:
     hostname: "prowler-api"
+    image: prowler-api-dev
     build:
       context: ./api
       dockerfile: Dockerfile
@@ -24,6 +25,8 @@ services:
         condition: service_healthy
       valkey:
         condition: service_healthy
+      neo4j:
+        condition: service_healthy
     entrypoint:
       - "/home/prowler/docker-entrypoint.sh"
       - "dev"
@@ -85,7 +88,41 @@ services:
       timeout: 5s
       retries: 3
 
+  neo4j:
+    image: graphstack/dozerdb:5.26.3.0
+    hostname: "neo4j"
+    volumes:
+      - ./_data/neo4j:/data
+    environment:
+      # We can't add our .env file because some of our current variables are not compatible with Neo4j env vars
+      # Auth
+      - NEO4J_AUTH=${NEO4J_USER}/${NEO4J_PASSWORD}
+      # Memory limits
+      - NEO4J_dbms_max__databases=${NEO4J_DBMS_MAX__DATABASES:-1000000}
+      - NEO4J_server_memory_pagecache_size=${NEO4J_SERVER_MEMORY_PAGECACHE_SIZE:-1G}
+      - NEO4J_server_memory_heap_initial__size=${NEO4J_SERVER_MEMORY_HEAP_INITIAL__SIZE:-1G}
+      - NEO4J_server_memory_heap_max__size=${NEO4J_SERVER_MEMORY_HEAP_MAX__SIZE:-1G}
+      # APOC
+      - apoc.export.file.enabled=${NEO4J_APOC_EXPORT_FILE_ENABLED:-true}
+      - apoc.import.file.enabled=${NEO4J_APOC_IMPORT_FILE_ENABLED:-true}
+      - apoc.import.file.use_neo4j_config=${NEO4J_APOC_IMPORT_FILE_USE_NEO4J_CONFIG:-true}
+      - "NEO4J_PLUGINS=${NEO4J_PLUGINS:-[\"apoc\"]}"
+      - "NEO4J_dbms_security_procedures_allowlist=${NEO4J_DBMS_SECURITY_PROCEDURES_ALLOWLIST:-apoc.*}"
+      - "NEO4J_dbms_security_procedures_unrestricted=${NEO4J_DBMS_SECURITY_PROCEDURES_UNRESTRICTED:-apoc.*}"
+      # Networking
+      - "dbms.connector.bolt.listen_address=${NEO4J_DBMS_CONNECTOR_BOLT_LISTEN_ADDRESS:-0.0.0.0:7687}"
+      # 7474 is the UI port
+    ports:
+      - 7474:7474
+      - ${NEO4J_PORT:-7687}:7687
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "http://localhost:7474"]
+      interval: 10s
+      timeout: 10s
+      retries: 10
+
   worker-dev:
+    image: prowler-api-dev
     build:
       context: ./api
       dockerfile: Dockerfile
@@ -96,17 +133,23 @@ services:
       - path: .env
         required: false
     volumes:
-      - "outputs:/tmp/prowler_api_output"
+      - ./api/src/backend:/home/prowler/backend
+      - ./api/pyproject.toml:/home/prowler/pyproject.toml
+      - 
./api/docker-entrypoint.sh:/home/prowler/docker-entrypoint.sh
+      - outputs:/tmp/prowler_api_output
     depends_on:
       valkey:
         condition: service_healthy
       postgres:
         condition: service_healthy
+      neo4j:
+        condition: service_healthy
     entrypoint:
       - "/home/prowler/docker-entrypoint.sh"
       - "worker"
 
   worker-beat:
+    image: prowler-api-dev
     build:
       context: ./api
       dockerfile: Dockerfile
@@ -121,6 +164,8 @@ services:
         condition: service_healthy
       postgres:
         condition: service_healthy
+      neo4j:
+        condition: service_healthy
     entrypoint:
       - "../docker-entrypoint.sh"
       - "beat"
diff --git a/docker-compose.yml b/docker-compose.yml
index 3c9b2f67ff..0cf60c66d3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -21,6 +21,8 @@ services:
         condition: service_healthy
       valkey:
         condition: service_healthy
+      neo4j:
+        condition: service_healthy
     entrypoint:
       - "/home/prowler/docker-entrypoint.sh"
       - "prod"
@@ -72,6 +74,37 @@ services:
       timeout: 5s
       retries: 3
 
+  neo4j:
+    image: graphstack/dozerdb:5.26.3.0
+    hostname: "neo4j"
+    volumes:
+      - ./_data/neo4j:/data
+    environment:
+      # We can't add our .env file because some of our current variables are not compatible with Neo4j env vars
+      # Auth
+      - NEO4J_AUTH=${NEO4J_USER}/${NEO4J_PASSWORD}
+      # Memory limits
+      - NEO4J_dbms_max__databases=${NEO4J_DBMS_MAX__DATABASES:-1000000}
+      - NEO4J_server_memory_pagecache_size=${NEO4J_SERVER_MEMORY_PAGECACHE_SIZE:-1G}
+      - NEO4J_server_memory_heap_initial__size=${NEO4J_SERVER_MEMORY_HEAP_INITIAL__SIZE:-1G}
+      - NEO4J_server_memory_heap_max__size=${NEO4J_SERVER_MEMORY_HEAP_MAX__SIZE:-1G}
+      # APOC
+      - apoc.export.file.enabled=${NEO4J_APOC_EXPORT_FILE_ENABLED:-true}
+      - apoc.import.file.enabled=${NEO4J_APOC_IMPORT_FILE_ENABLED:-true}
+      - apoc.import.file.use_neo4j_config=${NEO4J_APOC_IMPORT_FILE_USE_NEO4J_CONFIG:-true}
+      - "NEO4J_PLUGINS=${NEO4J_PLUGINS:-[\"apoc\"]}"
+      - "NEO4J_dbms_security_procedures_allowlist=${NEO4J_DBMS_SECURITY_PROCEDURES_ALLOWLIST:-apoc.*}"
+      - "NEO4J_dbms_security_procedures_unrestricted=${NEO4J_DBMS_SECURITY_PROCEDURES_UNRESTRICTED:-apoc.*}"
+      # Networking
+      - "dbms.connector.bolt.listen_address=${NEO4J_DBMS_CONNECTOR_BOLT_LISTEN_ADDRESS:-0.0.0.0:7687}"
+    ports:
+      - ${NEO4J_PORT:-7687}:7687
+    healthcheck:
+      test: ["CMD", "wget", "--no-verbose", "http://localhost:7474"]
+      interval: 10s
+      timeout: 10s
+      retries: 10
+
   worker:
     image: prowlercloud/prowler-api:${PROWLER_API_VERSION:-stable}
     env_file:
diff --git a/skills/prowler-ci/SKILL.md b/skills/prowler-ci/SKILL.md
index 1bed87d74e..b673178c8c 100644
--- a/skills/prowler-ci/SKILL.md
+++ b/skills/prowler-ci/SKILL.md
@@ -44,7 +44,31 @@ Use this skill whenever you are:
 3. If it's a title check: verify PR title matches Conventional Commits.
 4. If it's changelog: verify the right `CHANGELOG.md` is updated OR apply `no-changelog` label.
 5. If it's conflict checker: remove `<<<<<<<`, `=======`, `>>>>>>>` markers.
-6. If it's secrets: remove credentials and rotate anything leaked.
+6. If it's secrets (TruffleHog): see section below.
+
+## TruffleHog Secret Scanning
+
+TruffleHog scans for leaked secrets. Common false positives in test files:
+
+**Patterns that trigger TruffleHog:**
+- `sk-*T3BlbkFJ*` - OpenAI API keys
+- `AKIA[A-Z0-9]{16}` - AWS Access Keys
+- `ghp_*` / `gho_*` - GitHub tokens
+- Base64-encoded strings that look like credentials
+
+**Fix for test files:**
+```python
+# BAD - looks like real OpenAI key
+api_key = "sk-test1234567890T3BlbkFJtest1234567890"
+
+# GOOD - obviously fake
+api_key = "sk-fake-test-key-for-unit-testing-only"
+```
+
+**If TruffleHog flags a real secret:**
+1. 
Remove the secret from the code immediately
+2. Rotate the credential (it's now in git history)
+3. Consider using `.trufflehog-ignore` for known false positives (rarely needed)
 
 ## Notes
diff --git a/skills/prowler-test-api/SKILL.md b/skills/prowler-test-api/SKILL.md
index edda0b9c6d..f0cf7bade8 100644
--- a/skills/prowler-test-api/SKILL.md
+++ b/skills/prowler-test-api/SKILL.md
@@ -20,6 +20,7 @@ allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
 - ALWAYS use `content_type = "application/vnd.api+json"` in requests
 - ALWAYS test cross-tenant isolation with `other_tenant_provider` fixture
 - NEVER skip RLS isolation tests when adding new endpoints
+- NEVER use realistic-looking API keys in tests (TruffleHog will flag them)
 
 ---
 
@@ -107,6 +108,27 @@ def test_task_success(self, mock_scan):
 
 ---
 
+## 7. Fake Secrets in Tests (TruffleHog)
+
+CI runs TruffleHog to detect leaked secrets. Use obviously fake values:
+
+```python
+# BAD - TruffleHog will flag these patterns:
+api_key = "sk-test1234567890T3BlbkFJtest1234567890"  # OpenAI pattern
+api_key = "AKIA..."  # AWS pattern
+
+# GOOD - clearly fake values:
+api_key = "sk-fake-test-key-for-unit-testing-only"
+api_key = "fake-aws-key-for-testing"
+```
+
+**Patterns to avoid:**
+- `sk-*T3BlbkFJ*` (OpenAI)
+- `AKIA[A-Z0-9]{16}` (AWS Access Key)
+- `ghp_*` or `gho_*` (GitHub tokens)
+
+---
+
 ## Commands
 
 ```bash
diff --git a/ui/.husky/pre-commit b/ui/.husky/pre-commit
index 9e12cf3a85..db9b378299 100755
--- a/ui/.husky/pre-commit
+++ b/ui/.husky/pre-commit
@@ -37,8 +37,8 @@ CODE_REVIEW_ENABLED=$(echo "$CODE_REVIEW_ENABLED" | tr '[:upper:]' '[:lower:]')
 echo -e "${BLUE}ℹ️ Code Review Status: ${CODE_REVIEW_ENABLED}${NC}"
 echo ""
 
-# Get staged files (what will be committed)
-STAGED_FILES=$(git diff --cached --name-only --diff-filter=ACM | grep -E '\.(tsx?|jsx?)$' || true)
+# Get staged files in the UI folder only (what will be committed)
+STAGED_FILES=$(git diff --cached --name-only --diff-filter=ACM -- 'ui/**' | grep -E '\.(tsx?|jsx?)$' || true)
 
 if [ "$CODE_REVIEW_ENABLED" = "true" ]; then
   if [ -z "$STAGED_FILES" ]; then
@@ -135,7 +135,14 @@ else
   echo ""
 fi
 
-# Run healthcheck (typecheck and lint check)
+# Check if there are any UI files to validate
+if [ -z "$STAGED_FILES" ] && [ "$CODE_REVIEW_ENABLED" = "true" ]; then
+  echo -e "${YELLOW}⏭️ No UI files to validate, skipping healthcheck${NC}"
+  echo ""
+  exit 0
+fi
+
+# Run healthcheck (typecheck and lint check) only if there are UI changes
 echo -e "${BLUE}🏥 Running healthcheck...${NC}"
 echo ""
 
diff --git a/ui/CHANGELOG.md b/ui/CHANGELOG.md
index e44b3339d5..a58dcf399c 100644
--- a/ui/CHANGELOG.md
+++ b/ui/CHANGELOG.md
@@ -12,6 +12,7 @@ All notable changes to the **Prowler UI** are documented in this file.
 - Add ThreatScore pillar breakdown to Compliance Summary page and detail view [(#9773)](https://github.com/prowler-cloud/prowler/pull/9773)
 - Add Provider and Group filters to Resources page [(#9492)](https://github.com/prowler-cloud/prowler/pull/9492)
 - Compliance Watchlist component in Overview page [(#9786)](https://github.com/prowler-cloud/prowler/pull/9786)
+- Add a new main section to list Attack Paths scans, execute queries on them, and view their results as a graph [(#9805)](https://github.com/prowler-cloud/prowler/pull/9805)
 
 ### 🔄 Changed
 
@@ -132,6 +133,7 @@ All notable changes to the **Prowler UI** are documented in this file.
- PDF reporting for NIS2 compliance framework [(#9170)](https://github.com/prowler-cloud/prowler/pull/9170) - External resource link to IaC findings for direct navigation to source code in Git repositories [(#9151)](https://github.com/prowler-cloud/prowler/pull/9151) - New Overview page and new app styles [(#9234)](https://github.com/prowler-cloud/prowler/pull/9234) +- Attack Paths feature with query execution and graph visualization [(#9270)](https://github.com/prowler-cloud/prowler/pull/9270) - Use branch name as region for IaC findings [(#9296)](https://github.com/prowler-cloud/prowler/pull/9296) ### 🔄 Changed diff --git a/ui/actions/attack-paths/index.ts b/ui/actions/attack-paths/index.ts new file mode 100644 index 0000000000..0120dceb86 --- /dev/null +++ b/ui/actions/attack-paths/index.ts @@ -0,0 +1,4 @@ +export * from "./queries"; +export * from "./queries.adapter"; +export * from "./scans"; +export * from "./scans.adapter"; diff --git a/ui/actions/attack-paths/queries.adapter.ts b/ui/actions/attack-paths/queries.adapter.ts new file mode 100644 index 0000000000..fd256739e1 --- /dev/null +++ b/ui/actions/attack-paths/queries.adapter.ts @@ -0,0 +1,55 @@ +import { MetaDataProps } from "@/types"; +import { + AttackPathQueriesResponse, + AttackPathQuery, +} from "@/types/attack-paths"; + +/** + * Adapts raw query API responses to enriched domain models + * - Enriches queries with metadata and computed properties + * - Co-locates related data for better performance + * - Preserves pagination metadata for list operations + * + * Uses plugin architecture for extensibility: + * - Handles query-specific response transformation + * - Can be composed with backend service plugins + * - Maintains separation of concerns between API layer and business logic + */ + +/** + * Adapt attack path queries response with enriched data + * + * @param response - Raw API response from attack-paths-scans/{id}/queries endpoint + * @returns Enriched queries data with metadata + */ +export function adaptAttackPathQueriesResponse( + response: AttackPathQueriesResponse | undefined, +): { + data: AttackPathQuery[]; + metadata?: MetaDataProps; +} { + if (!response?.data) { + return { data: [] }; + } + + // Enrich query data with computed properties + const enrichedData = response.data.map((query) => ({ + ...query, + // Can add computed properties here, e.g.: + // parameterCount: query.attributes.parameters.length, + // requiredParameters: query.attributes.parameters.filter(p => p.required), + // hasParameters: query.attributes.parameters.length > 0, + })); + + const metadata: MetaDataProps | undefined = { + pagination: { + page: 1, + pages: 1, + count: enrichedData.length, + itemsPerPage: [10, 25, 50, 100], + }, + version: "1.0", + }; + + return { data: enrichedData, metadata }; +} diff --git a/ui/actions/attack-paths/queries.ts b/ui/actions/attack-paths/queries.ts new file mode 100644 index 0000000000..0332e6ec22 --- /dev/null +++ b/ui/actions/attack-paths/queries.ts @@ -0,0 +1,97 @@ +"use server"; + +import { z } from "zod"; + +import { apiBaseUrl, getAuthHeaders } from "@/lib"; +import { handleApiResponse } from "@/lib/server-actions-helper"; +import { + AttackPathQueriesResponse, + AttackPathQuery, + AttackPathQueryResult, + ExecuteQueryRequest, +} from "@/types/attack-paths"; + +import { adaptAttackPathQueriesResponse } from "./queries.adapter"; + +// Validation schema for UUID - RFC 9562/4122 compliant +const UUIDSchema = z.uuid(); + +/** + * Fetch available queries for a specific attack path scan + */
+export const getAvailableQueries = async ( + scanId: string, +): Promise<{ data: AttackPathQuery[] } | undefined> => { + // Validate scanId is a valid UUID format to prevent request forgery + const validatedScanId = UUIDSchema.safeParse(scanId); + if (!validatedScanId.success) { + console.error("Invalid scan ID format"); + return undefined; + } + + const headers = await getAuthHeaders({ contentType: false }); + + try { + const response = await fetch( + `${apiBaseUrl}/attack-paths-scans/${validatedScanId.data}/queries`, + { + headers, + method: "GET", + }, + ); + + const apiResponse = (await handleApiResponse( + response, + )) as AttackPathQueriesResponse; + const adaptedData = adaptAttackPathQueriesResponse(apiResponse); + + return { data: adaptedData.data }; + } catch (error) { + console.error("Error fetching available queries for scan:", error); + return undefined; + } +}; + +/** + * Execute a query on an attack path scan + */ +export const executeQuery = async ( + scanId: string, + queryId: string, + parameters?: Record, +): Promise => { + // Validate scanId is a valid UUID format to prevent request forgery + const validatedScanId = UUIDSchema.safeParse(scanId); + if (!validatedScanId.success) { + console.error("Invalid scan ID format"); + return undefined; + } + + const headers = await getAuthHeaders({ contentType: true }); + + const requestBody: ExecuteQueryRequest = { + data: { + type: "attack-paths-query-run-requests", + attributes: { + id: queryId, + ...(parameters && { parameters }), + }, + }, + }; + + try { + const response = await fetch( + `${apiBaseUrl}/attack-paths-scans/${validatedScanId.data}/queries/run`, + { + headers, + method: "POST", + body: JSON.stringify(requestBody), + }, + ); + + return handleApiResponse(response); + } catch (error) { + console.error("Error executing query on scan:", error); + return undefined; + } +}; diff --git a/ui/actions/attack-paths/query-result.adapter.ts b/ui/actions/attack-paths/query-result.adapter.ts new file mode 100644 index 0000000000..65b33843af --- /dev/null +++ b/ui/actions/attack-paths/query-result.adapter.ts @@ -0,0 +1,164 @@ +import { + AttackPathGraphData, + GraphEdge, + GraphNodeProperties, + GraphNodePropertyValue, + GraphRelationship, +} from "@/types/attack-paths"; + +/** + * Normalizes property values to ensure they are primitives + * Arrays are converted to comma-separated strings + * + * @param value - The property value to normalize + * @returns Normalized primitive value + */ +function normalizePropertyValue( + value: + | GraphNodePropertyValue + | GraphNodePropertyValue[] + | Record, +): string | number | boolean | null | undefined { + if (value === null || value === undefined) { + return value; + } + + if (Array.isArray(value)) { + // Convert arrays to comma-separated strings + return value.join(", "); + } + + if ( + typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" + ) { + return value; + } + + // For any other type, convert to string + return String(value); +} + +/** + * Normalizes all properties in an object to ensure they are primitives + * + * @param properties - The properties object to normalize + * @returns Normalized properties object + */ +function normalizeProperties( + properties: Record< + string, + GraphNodePropertyValue | GraphNodePropertyValue[] | Record + >, +): GraphNodeProperties { + const normalized: GraphNodeProperties = {}; + + for (const [key, value] of Object.entries(properties)) { + normalized[key] = normalizePropertyValue(value); + } + + return 
normalized; +} + +/** + * Adapts graph query result data for D3 visualization + * Transforms relationships array into edges array for D3 force-directed graph + * + * The adapter handles: + * - Converting relationship objects to edge objects compatible with D3 + * - Mapping relationship labels to edge types for graph styling + * - Normalizing array properties to strings (e.g., anonymous_actions: ["s3:GetObject"] -> "s3:GetObject") + * - Preserving node and relationship data structure + * - Adding findings array to each node based on HAS_FINDING edges + * - Adding resources array to finding nodes based on HAS_FINDING edges (reverse relationship) + * + * @param graphData - Raw graph data with nodes and relationships from API + * @returns Graph data with edges array formatted for D3 visualization and findings/resources on nodes + */ +export function adaptQueryResultToGraphData( + graphData: AttackPathGraphData, +): AttackPathGraphData { + // Normalize node properties to ensure all values are primitives + const normalizedNodes = graphData.nodes.map((node) => ({ + ...node, + properties: normalizeProperties( + node.properties as Record< + string, + GraphNodePropertyValue | GraphNodePropertyValue[] + >, + ), + findings: [] as string[], // Will be populated below + resources: [] as string[], // Will be populated below for finding nodes + })); + + // Transform relationships into D3-compatible edges if relationships exist + // Also handle case where edges are already provided (e.g., from mock data) + let edges: GraphEdge[] = []; + + if (graphData.relationships) { + edges = (graphData.relationships as GraphRelationship[]).map( + (relationship) => ({ + id: relationship.id, + source: relationship.source, + target: relationship.target, + type: relationship.label, // D3 uses 'type' for styling edge appearance + properties: relationship.properties + ? normalizeProperties( + relationship.properties as Record< + string, + GraphNodePropertyValue | GraphNodePropertyValue[] + >, + ) + : undefined, + }), + ); + } else if (graphData.edges) { + // If edges are already provided, just normalize their properties + edges = (graphData.edges as GraphEdge[]).map((edge) => ({ + ...edge, + properties: edge.properties + ? normalizeProperties( + edge.properties as Record< + string, + GraphNodePropertyValue | GraphNodePropertyValue[] + >, + ) + : undefined, + })); + } + + // Populate findings and resources based on HAS_FINDING edges + edges.forEach((edge) => { + if (edge.type === "HAS_FINDING") { + const sourceId = + typeof edge.source === "string" + ? edge.source + : (edge.source as { id?: string })?.id; + const targetId = + typeof edge.target === "string" + ? 
edge.target + : (edge.target as { id?: string })?.id; + + if (sourceId && targetId) { + // Add finding to source node (resource -> finding) + const sourceNode = normalizedNodes.find((n) => n.id === sourceId); + if (sourceNode) { + sourceNode.findings.push(targetId); + } + + // Add resource to target node (finding <- resource) + const targetNode = normalizedNodes.find((n) => n.id === targetId); + if (targetNode) { + targetNode.resources.push(sourceId); + } + } + } + }); + + return { + nodes: normalizedNodes, + edges, + relationships: graphData.relationships, // Preserve original relationships data + }; +} diff --git a/ui/actions/attack-paths/scans.adapter.ts b/ui/actions/attack-paths/scans.adapter.ts new file mode 100644 index 0000000000..a8236241a3 --- /dev/null +++ b/ui/actions/attack-paths/scans.adapter.ts @@ -0,0 +1,89 @@ +import { MetaDataProps } from "@/types"; +import { AttackPathScan, AttackPathScansResponse } from "@/types/attack-paths"; + +/** + * Adapts raw scan API responses to enriched domain models + * - Transforms raw scan data with computed properties + * - Co-locates related data for better performance + * - Preserves pagination metadata for list operations + * + * Uses plugin architecture for extensibility: + * - Handles scan-specific response transformation + * - Can be composed with backend service plugins + * - Maintains separation of concerns between API layer and business logic + */ + +/** + * Adapt attack path scans response with enriched data + * + * @param response - Raw API response from attack-paths-scans endpoint + * @returns Enriched scans data with metadata and computed properties + */ +export function adaptAttackPathScansResponse( + response: AttackPathScansResponse | undefined, +): { + data: AttackPathScan[]; + metadata?: MetaDataProps; +} { + if (!response?.data) { + return { data: [] }; + } + + // Enrich scan data with computed properties + const enrichedData = response.data.map((scan) => ({ + ...scan, + attributes: { + ...scan.attributes, + // Format duration for display + durationLabel: scan.attributes.duration + ? formatDuration(scan.attributes.duration) + : null, + // Check if scan is recent (completed within last 24 hours) + isRecent: isRecentScan(scan.attributes.completed_at), + }, + })); + + // Transform links to MetaDataProps format if pagination exists + const metadata: MetaDataProps | undefined = response.links + ? 
{ + pagination: { + // Links-based pagination doesn't have traditional page numbers + // but we preserve the structure for consistency + page: 1, + pages: 1, + count: enrichedData.length, + itemsPerPage: [10, 25, 50, 100], + }, + version: "1.0", + } + : undefined; + + return { data: enrichedData, metadata }; +} + +/** + * Format duration in seconds to human-readable format + * + * @param seconds - Duration in seconds + * @returns Formatted duration string (e.g., "2m 30s") + */ +function formatDuration(seconds: number): string { + const minutes = Math.floor(seconds / 60); + const remainingSeconds = seconds % 60; + return `${minutes}m ${remainingSeconds}s`; +} + +/** + * Check if a scan is recent (completed within last 24 hours) + * + * @param completedAt - Completion timestamp + * @returns true if scan completed within last 24 hours + */ +function isRecentScan(completedAt: string | null): boolean { + if (!completedAt) return false; + + const completionTime = new Date(completedAt).getTime(); + const oneDayAgo = Date.now() - 24 * 60 * 60 * 1000; + + return completionTime > oneDayAgo; +} diff --git a/ui/actions/attack-paths/scans.ts b/ui/actions/attack-paths/scans.ts new file mode 100644 index 0000000000..11342f6ad3 --- /dev/null +++ b/ui/actions/attack-paths/scans.ts @@ -0,0 +1,69 @@ +"use server"; + +import { z } from "zod"; + +import { apiBaseUrl, getAuthHeaders } from "@/lib"; +import { handleApiResponse } from "@/lib/server-actions-helper"; +import { AttackPathScan, AttackPathScansResponse } from "@/types/attack-paths"; + +import { adaptAttackPathScansResponse } from "./scans.adapter"; + +// Validation schema for UUID - RFC 9562/4122 compliant +const UUIDSchema = z.uuid(); + +/** + * Fetch list of attack path scans (latest scan for each provider) + */ +export const getAttackPathScans = async (): Promise< + { data: AttackPathScan[] } | undefined +> => { + const headers = await getAuthHeaders({ contentType: false }); + + try { + const response = await fetch(`${apiBaseUrl}/attack-paths-scans`, { + headers, + method: "GET", + }); + + const apiResponse = (await handleApiResponse( + response, + )) as AttackPathScansResponse; + const adaptedData = adaptAttackPathScansResponse(apiResponse); + + return { data: adaptedData.data }; + } catch (error) { + console.error("Error fetching attack path scans:", error); + return undefined; + } +}; + +/** + * Fetch detail of a specific attack path scan + */ +export const getAttackPathScanDetail = async ( + scanId: string, +): Promise<{ data: AttackPathScan } | undefined> => { + // Validate scanId is a valid UUID format to prevent request forgery + const validatedScanId = UUIDSchema.safeParse(scanId); + if (!validatedScanId.success) { + console.error("Invalid scan ID format"); + return undefined; + } + + const headers = await getAuthHeaders({ contentType: false }); + + try { + const response = await fetch( + `${apiBaseUrl}/attack-paths-scans/${validatedScanId.data}`, + { + headers, + method: "GET", + }, + ); + + return handleApiResponse(response); + } catch (error) { + console.error("Error fetching attack path scan detail:", error); + return undefined; + } +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/_components/index.ts b/ui/app/(prowler)/attack-paths/(workflow)/_components/index.ts new file mode 100644 index 0000000000..9dab45a6b5 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/_components/index.ts @@ -0,0 +1,2 @@ +export { VerticalSteps } from "./vertical-steps"; +export { WorkflowAttackPaths } from "./workflow-attack-paths"; diff 
--git a/ui/app/(prowler)/attack-paths/(workflow)/_components/vertical-steps.tsx b/ui/app/(prowler)/attack-paths/(workflow)/_components/vertical-steps.tsx new file mode 100644 index 0000000000..9415e73134 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/_components/vertical-steps.tsx @@ -0,0 +1,299 @@ +"use client"; + +import { useControlledState } from "@react-stately/utils"; +import { domAnimation, LazyMotion, m } from "framer-motion"; +import type { + ComponentProps, + CSSProperties, + HTMLAttributes, + ReactNode, +} from "react"; +import { forwardRef } from "react"; + +import { cn } from "@/lib/utils"; + +export type VerticalStepProps = { + className?: string; + description?: ReactNode; + title?: ReactNode; +}; + +const STEP_COLORS = { + primary: "primary", + secondary: "secondary", + success: "success", + warning: "warning", + danger: "danger", + default: "default", +} as const; + +type StepColor = (typeof STEP_COLORS)[keyof typeof STEP_COLORS]; + +export interface VerticalStepsProps extends HTMLAttributes { + /** + * An array of steps. + * + * @default [] + */ + steps?: VerticalStepProps[]; + /** + * The color of the steps. + * + * @default "primary" + */ + color?: StepColor; + /** + * The current step index. + */ + currentStep?: number; + /** + * The default step index. + * + * @default 0 + */ + defaultStep?: number; + /** + * Whether to hide the progress bars. + * + * @default false + */ + hideProgressBars?: boolean; + /** + * The custom class for the steps wrapper. + */ + className?: string; + /** + * The custom class for the step. + */ + stepClassName?: string; + /** + * Callback function when the step index changes. + */ + onStepChange?: (stepIndex: number) => void; +} + +function CheckIcon(props: ComponentProps<"svg">) { + return ( + + + + ); +} + +export const VerticalSteps = forwardRef( + ( + { + color = "primary", + steps = [], + defaultStep = 0, + onStepChange, + currentStep: currentStepProp, + hideProgressBars = false, + stepClassName, + className, + ...props + }, + ref, + ) => { + const [currentStep, setCurrentStep] = useControlledState( + currentStepProp, + defaultStep, + onStepChange, + ); + + let userColor; + let fgColor; + + const colorsVars = [ + "[--active-fg-color:var(--step-fg-color)]", + "[--active-border-color:var(--step-color)]", + "[--active-color:var(--step-color)]", + "[--complete-background-color:var(--step-color)]", + "[--complete-border-color:var(--step-color)]", + "[--inactive-border-color:hsl(var(--heroui-default-300))]", + "[--inactive-color:hsl(var(--heroui-default-300))]", + ]; + + switch (color) { + case "primary": + userColor = "[--step-color:hsl(var(--heroui-primary))]"; + fgColor = "[--step-fg-color:hsl(var(--heroui-primary-foreground))]"; + break; + case "secondary": + userColor = "[--step-color:hsl(var(--heroui-secondary))]"; + fgColor = "[--step-fg-color:hsl(var(--heroui-secondary-foreground))]"; + break; + case "success": + userColor = "[--step-color:hsl(var(--heroui-success))]"; + fgColor = "[--step-fg-color:hsl(var(--heroui-success-foreground))]"; + break; + case "warning": + userColor = "[--step-color:hsl(var(--heroui-warning))]"; + fgColor = "[--step-fg-color:hsl(var(--heroui-warning-foreground))]"; + break; + case "danger": + userColor = "[--step-color:hsl(var(--heroui-error))]"; + fgColor = "[--step-fg-color:hsl(var(--heroui-error-foreground))]"; + break; + case "default": + userColor = "[--step-color:hsl(var(--heroui-default))]"; + fgColor = "[--step-fg-color:hsl(var(--heroui-default-foreground))]"; + break; + default: + 
userColor = "[--step-color:hsl(var(--heroui-primary))]"; + fgColor = "[--step-fg-color:hsl(var(--heroui-primary-foreground))]"; + break; + } + + if (!className?.includes("--step-fg-color")) colorsVars.unshift(fgColor); + if (!className?.includes("--step-color")) colorsVars.unshift(userColor); + if (!className?.includes("--inactive-bar-color")) + colorsVars.push("[--inactive-bar-color:hsl(var(--heroui-default-300))]"); + + const colors = colorsVars; + + return ( + + ); + }, +); + +VerticalSteps.displayName = "VerticalSteps"; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/_components/workflow-attack-paths.tsx b/ui/app/(prowler)/attack-paths/(workflow)/_components/workflow-attack-paths.tsx new file mode 100644 index 0000000000..9e1f3684ca --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/_components/workflow-attack-paths.tsx @@ -0,0 +1,49 @@ +"use client"; + +import { usePathname } from "next/navigation"; + +import { VerticalSteps } from "./vertical-steps"; + +/** + * Workflow steps component for Attack Paths wizard + * Shows progress and navigation steps for the two-step process + */ +export const WorkflowAttackPaths = () => { + const pathname = usePathname(); + + // Determine current step based on pathname + const isQueryBuilderStep = pathname.includes("query-builder"); + + const currentStep = isQueryBuilderStep ? 1 : 0; // 0-indexed + + const steps = [ + { + title: "Select Attack Paths Scan", + description: "Choose an AWS account and its latest Attack Paths scan", + }, + { + title: "Build Query & Visualize", + description: "Create a query and view the Attack Paths graph", + }, + ]; + + const progressPercentage = (currentStep / (steps.length - 1)) * 100; + + return ( +
+
+
+
+
+

+ Step {currentStep + 1} of {steps.length} +

+
+ + +
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/layout.tsx b/ui/app/(prowler)/attack-paths/(workflow)/layout.tsx new file mode 100644 index 0000000000..8557ab5369 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/layout.tsx @@ -0,0 +1,21 @@ +import { Navbar } from "@/components/ui/nav-bar/navbar"; + +/** + * Workflow layout for Attack Paths + * Displays content with navbar + */ +export default function AttackPathsWorkflowLayout({ + children, +}: { + children: React.ReactNode; +}) { + return ( + <> + +
+ {/* Content */} +
{children}
+
+ + ); +} diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/execute-button.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/execute-button.tsx new file mode 100644 index 0000000000..07caf5547a --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/execute-button.tsx @@ -0,0 +1,34 @@ +"use client"; + +import { Play } from "lucide-react"; + +import { Button } from "@/components/shadcn"; + +interface ExecuteButtonProps { + isLoading: boolean; + isDisabled: boolean; + onExecute: () => void; +} + +/** + * Execute query button component + * Triggers query execution with loading state + */ +export const ExecuteButton = ({ + isLoading, + isDisabled, + onExecute, +}: ExecuteButtonProps) => { + return ( + + ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/attack-path-graph.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/attack-path-graph.tsx new file mode 100644 index 0000000000..67ed4c42c7 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/attack-path-graph.tsx @@ -0,0 +1,1168 @@ +"use client"; + +import type { D3ZoomEvent, ZoomBehavior } from "d3"; +import { select, zoom, zoomIdentity } from "d3"; +import dagre from "dagre"; +import { + forwardRef, + type Ref, + useEffect, + useImperativeHandle, + useRef, + useState, +} from "react"; + +import type { AttackPathGraphData, GraphNode } from "@/types/attack-paths"; + +import { + formatNodeLabel, + getNodeBorderColor, + getNodeColor, + getPathEdges, + GRAPH_ALERT_BORDER_COLOR, + GRAPH_EDGE_COLOR, + GRAPH_EDGE_HIGHLIGHT_COLOR, +} from "../../_lib"; + +export interface AttackPathGraphRef { + zoomIn: () => void; + zoomOut: () => void; + resetZoom: () => void; + getZoomLevel: () => number; + getSVGElement: () => SVGSVGElement | null; +} + +interface AttackPathGraphProps { + data: AttackPathGraphData; + onNodeClick?: (node: GraphNode) => void; + selectedNodeId?: string | null; + isFilteredView?: boolean; + ref?: Ref; +} + +/** + * Node data type used throughout the graph visualization + */ +type NodeData = { id: string; x: number; y: number; data: GraphNode }; + +// Node dimensions - modern rounded pill style +const NODE_WIDTH = 180; +const NODE_HEIGHT = 50; +const NODE_RADIUS = 25; // Fully rounded ends for pill shape +const HEXAGON_WIDTH = 200; // Width for finding hexagons +const HEXAGON_HEIGHT = 55; // Height for finding hexagons + +/** + * D3 + Dagre hierarchical graph visualization for attack paths + * Renders rounded rectangle nodes with dashed edges + */ +const AttackPathGraphComponent = forwardRef< + AttackPathGraphRef, + AttackPathGraphProps +>(({ data, onNodeClick, selectedNodeId, isFilteredView = false }, ref) => { + const svgRef = useRef(null); + const [zoomLevel, setZoomLevel] = useState(1); + const zoomBehaviorRef = useRef | null>( + null, + ); + const containerRef = useRef + > | null>(null); + const svgSelectionRef = useRef + > | null>(null); + const hiddenNodeIdsRef = useRef>(new Set()); + const onNodeClickRef = useRef(onNodeClick); + const nodeShapesRef = useRef + > | null>(null); + const linkElementsRef = useRef + > | null>(null); + const resourcesWithFindingsRef = useRef>(new Set()); + const selectedNodeIdRef = useRef(null); + const edgesDataRef = useRef< + Array<{ + sourceId: string; + targetId: string; + }> + >([]); + + // Keep selectedNodeIdRef in sync with selectedNodeId + useEffect(() => { + selectedNodeIdRef.current = selectedNodeId 
?? null; + }, [selectedNodeId]); + + // Update ref when onNodeClick changes + useEffect(() => { + onNodeClickRef.current = onNodeClick; + }, [onNodeClick]); + + // Update selected node styling and edge highlighting without re-rendering + useEffect(() => { + if (nodeShapesRef.current) { + nodeShapesRef.current + .attr("stroke", (d: NodeData) => { + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const hasFindings = resourcesWithFindingsRef.current.has(d.id); + + // Resources with findings always keep red border + if (!isFinding && hasFindings) { + return GRAPH_ALERT_BORDER_COLOR; + } + // Selected nodes get highlight color (orange) + if (d.id === selectedNodeId) { + return GRAPH_EDGE_HIGHLIGHT_COLOR; + } + // Default border color + return getNodeBorderColor(d.data.labels, d.data.properties); + }) + .attr("stroke-width", (d: NodeData) => { + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const hasFindings = resourcesWithFindingsRef.current.has(d.id); + const isSelected = d.id === selectedNodeId; + + if (isSelected) return 4; + if (!isFinding && hasFindings) return 2.5; + return isFinding ? 2 : 1.5; + }) + .attr("filter", (d: NodeData) => { + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const hasFindings = resourcesWithFindingsRef.current.has(d.id); + const isSelected = d.id === selectedNodeId; + + if (isSelected) return "url(#selectedGlow)"; + if (!isFinding && hasFindings) return "url(#redGlow)"; + return isFinding ? "url(#glow)" : null; + }) + .attr("class", (d: NodeData) => { + const isSelected = d.id === selectedNodeId; + return isSelected ? "node-shape selected-node" : "node-shape"; + }); + } + + // Update edge highlighting for selected node - highlight entire path + if (linkElementsRef.current && edgesDataRef.current.length > 0) { + const pathEdges = selectedNodeId + ? getPathEdges(selectedNodeId, edgesDataRef.current) + : new Set(); + + linkElementsRef.current.each(function (edgeData: { + sourceId: string; + targetId: string; + }) { + const edgeId = `${edgeData.sourceId}-${edgeData.targetId}`; + const isInPath = pathEdges.has(edgeId); + select(this) + .attr( + "stroke", + isInPath ? GRAPH_EDGE_HIGHLIGHT_COLOR : GRAPH_EDGE_COLOR, + ) + .attr( + "marker-end", + isInPath ? 
"url(#arrowhead-highlight)" : "url(#arrowhead)", + ); + }); + } + }, [selectedNodeId]); + + useImperativeHandle(ref, () => ({ + zoomIn: () => { + if (svgSelectionRef.current && zoomBehaviorRef.current) { + svgSelectionRef.current + .transition() + .duration(300) + .call(zoomBehaviorRef.current.scaleBy, 1.3); + } + }, + zoomOut: () => { + if (svgSelectionRef.current && zoomBehaviorRef.current) { + svgSelectionRef.current + .transition() + .duration(300) + .call(zoomBehaviorRef.current.scaleBy, 0.77); + } + }, + resetZoom: () => { + if ( + svgSelectionRef.current && + zoomBehaviorRef.current && + containerRef.current + ) { + const bounds = containerRef.current.node()?.getBBox(); + if (!bounds) return; + + const fullWidth = svgRef.current?.clientWidth || 800; + const fullHeight = svgRef.current?.clientHeight || 500; + + const midX = bounds.x + bounds.width / 2; + const midY = bounds.y + bounds.height / 2; + const scale = + 0.8 / Math.max(bounds.width / fullWidth, bounds.height / fullHeight); + const tx = fullWidth / 2 - scale * midX; + const ty = fullHeight / 2 - scale * midY; + + svgSelectionRef.current + .transition() + .duration(300) + .call( + zoomBehaviorRef.current.transform, + zoomIdentity.translate(tx, ty).scale(scale), + ); + } + }, + getZoomLevel: () => zoomLevel, + getSVGElement: () => svgRef.current, + })); + + useEffect(() => { + if (!svgRef.current || !data.nodes || data.nodes.length === 0) return; + + // Set dimensions based on container size + const width = svgRef.current.clientWidth || 800; + const height = svgRef.current.clientHeight || 500; + + // Clear previous content + select(svgRef.current).selectAll("*").remove(); + + // Create SVG + const svg = select(svgRef.current) + .attr("width", width) + .attr("height", height) + .attr("viewBox", [0, 0, width, height]); + + // Create container for zoom/pan + const container = svg.append("g") as unknown as ReturnType< + typeof select + >; + containerRef.current = container; + svgSelectionRef.current = svg as unknown as ReturnType< + typeof select + >; + + // Container relationships (reverse direction for layout purposes) + const containerRelations = new Set([ + "RUNS_IN", + "BELONGS_TO", + "LOCATED_IN", + "PART_OF", + ]); + + // Create dagre graph + const g = new dagre.graphlib.Graph(); + g.setGraph({ + rankdir: "LR", // Left to right + nodesep: 80, // Vertical spacing between nodes + ranksep: 150, // Horizontal spacing between ranks + marginx: 50, + marginy: 50, + }); + g.setDefaultEdgeLabel(() => ({})); + + // Initially hide finding nodes - they are shown when user clicks on a node + // In filtered view, show all nodes since they're already filtered to the selected path + const initialHiddenNodes = new Set(); + if (!isFilteredView) { + data.nodes.forEach((node) => { + const isFinding = node.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + if (isFinding) { + initialHiddenNodes.add(node.id); + } + }); + } + hiddenNodeIdsRef.current = initialHiddenNodes; + + // Create a map to store original node data + const nodeDataMap = new Map(data.nodes.map((node) => [node.id, node])); + + // Add nodes to dagre graph with appropriate sizes + data.nodes.forEach((node) => { + const isFinding = node.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + g.setNode(node.id, { + label: node.id, + width: isFinding ? HEXAGON_WIDTH : NODE_WIDTH, + height: isFinding ? 
HEXAGON_HEIGHT : NODE_HEIGHT, + }); + }); + + // Add edges to dagre graph + if (data.edges && Array.isArray(data.edges)) { + data.edges.forEach((edge) => { + const source = edge.source; + const target = edge.target; + let sourceId = + typeof source === "string" + ? source + : typeof source === "object" && source !== null + ? (source as GraphNode).id + : ""; + let targetId = + typeof target === "string" + ? target + : typeof target === "object" && target !== null + ? (target as GraphNode).id + : ""; + + // Reverse container relationships for proper hierarchy + if (containerRelations.has(edge.type)) { + [sourceId, targetId] = [targetId, sourceId]; + } + + if (sourceId && targetId) { + g.setEdge(sourceId, targetId, { + originalSource: + typeof edge.source === "string" + ? edge.source + : (edge.source as GraphNode).id, + originalTarget: + typeof edge.target === "string" + ? edge.target + : (edge.target as GraphNode).id, + }); + } + }); + } + + // Run dagre layout + dagre.layout(g); + + // Draw edges + const edgesData: Array<{ + source: { x: number; y: number }; + target: { x: number; y: number }; + id: string; + sourceId: string; + targetId: string; + }> = []; + g.edges().forEach((e) => { + const sourceNode = g.node(e.v); + const targetNode = g.node(e.w); + + edgesData.push({ + source: { x: sourceNode.x, y: sourceNode.y }, + target: { x: targetNode.x, y: targetNode.y }, + id: `${e.v}-${e.w}`, + sourceId: e.v, + targetId: e.w, + }); + }); + + // Store edges data in ref for path highlighting + edgesDataRef.current = edgesData.map((e) => ({ + sourceId: e.sourceId, + targetId: e.targetId, + })); + + // Add defs for filters and markers FIRST (before using them) + const defs = svg.append("defs"); + + // Glow filter for nodes + const glowFilter = defs.append("filter").attr("id", "glow"); + glowFilter + .append("feGaussianBlur") + .attr("stdDeviation", "3") + .attr("result", "coloredBlur"); + const feMerge = glowFilter.append("feMerge"); + feMerge.append("feMergeNode").attr("in", "coloredBlur"); + feMerge.append("feMergeNode").attr("in", "SourceGraphic"); + + // Edge glow filter + const edgeGlowFilter = defs.append("filter").attr("id", "edgeGlow"); + edgeGlowFilter + .append("feGaussianBlur") + .attr("stdDeviation", "2") + .attr("result", "coloredBlur"); + const edgeFeMerge = edgeGlowFilter.append("feMerge"); + edgeFeMerge.append("feMergeNode").attr("in", "coloredBlur"); + edgeFeMerge.append("feMergeNode").attr("in", "SourceGraphic"); + + // Red glow filter for resources with findings + const redGlowFilter = defs.append("filter").attr("id", "redGlow"); + redGlowFilter + .append("feDropShadow") + .attr("dx", "0") + .attr("dy", "0") + .attr("stdDeviation", "4") + .attr("flood-color", GRAPH_ALERT_BORDER_COLOR) + .attr("flood-opacity", "0.6"); + + // Orange glow filter for selected/filtered node + const selectedGlowFilter = defs.append("filter").attr("id", "selectedGlow"); + selectedGlowFilter + .append("feDropShadow") + .attr("dx", "0") + .attr("dy", "0") + .attr("stdDeviation", "6") + .attr("flood-color", GRAPH_EDGE_HIGHLIGHT_COLOR) + .attr("flood-opacity", "0.8"); + + // Arrow marker (default white) - refX=10 places the arrow tip exactly at the line endpoint + defs + .append("marker") + .attr("id", "arrowhead") + .attr("viewBox", "0 0 10 10") + .attr("refX", 10) + .attr("refY", 5) + .attr("markerWidth", 6) + .attr("markerHeight", 6) + .attr("orient", "auto") + .append("path") + .attr("d", "M 0 0 L 10 5 L 0 10 z") + .attr("fill", GRAPH_EDGE_COLOR); + + // Arrow marker (highlighted orange) for hover 
state + defs + .append("marker") + .attr("id", "arrowhead-highlight") + .attr("viewBox", "0 0 10 10") + .attr("refX", 10) + .attr("refY", 5) + .attr("markerWidth", 6) + .attr("markerHeight", 6) + .attr("orient", "auto") + .append("path") + .attr("d", "M 0 0 L 10 5 L 0 10 z") + .attr("fill", GRAPH_EDGE_HIGHLIGHT_COLOR); + + // Add CSS animation for dashed lines, resource edge styles, and selected node pulse + svg.append("style").text(` + @keyframes dash { + to { + stroke-dashoffset: -20; + } + } + .animated-edge { + animation: dash 1s linear infinite; + } + .resource-edge { + stroke-opacity: 1; + } + @keyframes selectedPulse { + 0%, 100% { + stroke-opacity: 1; + stroke-width: 4px; + } + 50% { + stroke-opacity: 0.6; + stroke-width: 6px; + } + } + .selected-node { + animation: selectedPulse 1.2s ease-in-out infinite; + filter: url(#selectedGlow); + } + `); + + const linkGroup = container.append("g").attr("class", "links"); + + // Calculate edge endpoints based on node shape + const getEdgePoints = ( + sourceId: string, + targetId: string, + source: { x: number; y: number }, + target: { x: number; y: number }, + ) => { + const sourceNode = nodeDataMap.get(sourceId); + const targetNode = nodeDataMap.get(targetId); + + const sourceIsFinding = sourceNode?.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const targetIsFinding = targetNode?.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const sourceIsInternet = sourceNode?.labels.some( + (label) => label.toLowerCase() === "internet", + ); + const targetIsInternet = targetNode?.labels.some( + (label) => label.toLowerCase() === "internet", + ); + + // Get appropriate widths based on node type + // Internet nodes are circles with radius = NODE_HEIGHT * 0.8 + const sourceHalfWidth = sourceIsInternet + ? NODE_HEIGHT * 0.8 + : sourceIsFinding + ? HEXAGON_WIDTH / 2 + : NODE_WIDTH / 2; + const targetHalfWidth = targetIsInternet + ? NODE_HEIGHT * 0.8 + : targetIsFinding + ? HEXAGON_WIDTH / 2 + : NODE_WIDTH / 2; + + // Source exits from right side + const x1 = source.x + sourceHalfWidth; + const y1 = source.y; + + // Target enters from left side - line ends at node edge, arrow extends from there + const x2 = target.x - targetHalfWidth; + const y2 = target.y; + + return { x1, y1, x2, y2 }; + }; + + // Helper to check if a node is a finding + const isNodeFinding = (nodeId: string) => { + const node = nodeDataMap.get(nodeId); + return node?.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + }; + + const linkElements = linkGroup + .selectAll("line") + .data(edgesData) + .enter() + .append("line") + .attr( + "x1", + (d) => getEdgePoints(d.sourceId, d.targetId, d.source, d.target).x1, + ) + .attr( + "y1", + (d) => getEdgePoints(d.sourceId, d.targetId, d.source, d.target).y1, + ) + .attr( + "x2", + (d) => getEdgePoints(d.sourceId, d.targetId, d.source, d.target).x2, + ) + .attr( + "y2", + (d) => getEdgePoints(d.sourceId, d.targetId, d.source, d.target).y2, + ) + .attr("stroke", GRAPH_EDGE_COLOR) + .attr("stroke-width", 3) + .attr("stroke-linecap", "round") + .attr("stroke-dasharray", (d) => { + // Dashed lines only for edges connected to findings + const hasFinding = + isNodeFinding(d.sourceId) || isNodeFinding(d.targetId); + return hasFinding ? "8,6" : null; + }) + .attr("class", (d) => { + // Animate dashed lines + const hasFinding = + isNodeFinding(d.sourceId) || isNodeFinding(d.targetId); + return hasFinding ? 
"animated-edge" : "resource-edge"; + }) + .attr("marker-end", "url(#arrowhead)") + .style("visibility", (d) => { + const sourceIsFinding = isNodeFinding(d.sourceId); + const targetIsFinding = isNodeFinding(d.targetId); + + // Hide edges connected to findings in full view (shown when user clicks on a node or in filtered view) + if (!isFilteredView && (sourceIsFinding || targetIsFinding)) { + return "hidden"; + } + return "visible"; + }); + + // Store linkElements reference for hover interactions + // D3 selection types don't match our ref type exactly; safe cast for internal use + linkElementsRef.current = linkElements as unknown as ReturnType< + typeof select + >; + + // Draw nodes + const nodesData = g.nodes().map((v) => { + const node = g.node(v); + return { + id: v, + x: node.x, + y: node.y, + data: nodeDataMap.get(v)!, + }; + }); + + const nodeGroup = container.append("g").attr("class", "nodes"); + + const nodeElements = nodeGroup + .selectAll("g.node") + .data(nodesData) + .enter() + .append("g") + .attr("class", "node") + .attr("transform", (d) => `translate(${d.x},${d.y})`) + .attr("cursor", "pointer") + .style("display", (d) => { + // Hide findings in full view (they are shown when user clicks on a node or in filtered view) + return hiddenNodeIdsRef.current.has(d.id) ? "none" : null; + }) + .on("mouseenter", function (_event: PointerEvent, d) { + // Highlight entire path from this node + const pathEdges = getPathEdges(d.id, edgesData); + linkElements.each(function (edgeData) { + const edgeId = `${edgeData.sourceId}-${edgeData.targetId}`; + if (pathEdges.has(edgeId)) { + select(this) + .attr("stroke", GRAPH_EDGE_HIGHLIGHT_COLOR) + .attr("marker-end", "url(#arrowhead-highlight)"); + } + }); + + // Change node border to highlight color on hover + const nodeGroup = select(this); + const nodeShape = nodeGroup.select(".node-shape"); + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const hasFindings = resourcesWithFindings.has(d.id); + + // Don't change border for resources with findings (keep red) + if (!hasFindings || isFinding) { + nodeShape.attr("stroke", GRAPH_EDGE_HIGHLIGHT_COLOR); + } + }) + .on("mouseleave", function (_event: PointerEvent, d) { + const selectedId = selectedNodeIdRef.current; + + // Reset edges: keep selected node's path highlighted + const selectedPathEdges = selectedId + ? 
getPathEdges(selectedId, edgesData) + : new Set(); + + linkElements.each(function (edgeData) { + const edgeId = `${edgeData.sourceId}-${edgeData.targetId}`; + if (selectedPathEdges.has(edgeId)) { + select(this) + .attr("stroke", GRAPH_EDGE_HIGHLIGHT_COLOR) + .attr("marker-end", "url(#arrowhead-highlight)"); + } else { + select(this) + .attr("stroke", GRAPH_EDGE_COLOR) + .attr("marker-end", "url(#arrowhead)"); + } + }); + + // Reset node border + const nodeGroup = select(this); + const nodeShape = nodeGroup.select(".node-shape"); + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const hasFindings = resourcesWithFindings.has(d.id); + + // Determine the correct border color + if (!isFinding && hasFindings) { + nodeShape.attr("stroke", GRAPH_ALERT_BORDER_COLOR); + } else if (d.id === selectedId) { + nodeShape.attr("stroke", GRAPH_EDGE_HIGHLIGHT_COLOR); + } else { + nodeShape.attr( + "stroke", + getNodeBorderColor(d.data.labels, d.data.properties), + ); + } + }) + .on("click", function (event: PointerEvent, d) { + event.stopPropagation(); + + // Toggle visibility of connected finding nodes + const node = d.data; + const isFinding = node.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + + if (!isFinding) { + // Find connected findings for THIS node + const connectedFindings = new Set(); + data.edges?.forEach((edge) => { + const sourceId = + typeof edge.source === "string" + ? edge.source + : (edge.source as GraphNode).id; + const targetId = + typeof edge.target === "string" + ? edge.target + : (edge.target as GraphNode).id; + + if (sourceId === node.id || targetId === node.id) { + const otherId = sourceId === node.id ? targetId : sourceId; + const otherNode = data.nodes.find((n) => n.id === otherId); + if ( + otherNode?.labels.some((label) => + label.toLowerCase().includes("finding"), + ) + ) { + connectedFindings.add(otherId); + } + } + }); + + // Clear hidden nodes and hide ALL findings + hiddenNodeIdsRef.current.clear(); + data.nodes.forEach((n) => { + const isNodeFinding = n.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + if (isNodeFinding) { + hiddenNodeIdsRef.current.add(n.id); + } + }); + + // Show ONLY the findings connected to the clicked node + connectedFindings.forEach((findingId) => { + hiddenNodeIdsRef.current.delete(findingId); + }); + + // Update node visibility + nodeElements.style( + "display", + function (nodeData: { + id: string; + x: number; + y: number; + data: GraphNode; + }) { + return hiddenNodeIdsRef.current.has(nodeData.id) ? "none" : null; + }, + ); + + // Update edge visibility + linkElements.style( + "visibility", + function (edgeData: { + source: { x: number; y: number }; + target: { x: number; y: number }; + id: string; + sourceId: string; + targetId: string; + }) { + // Resource-to-resource edges are ALWAYS visible + const sourceIsFinding = isNodeFinding(edgeData.sourceId); + const targetIsFinding = isNodeFinding(edgeData.targetId); + + if (!sourceIsFinding && !targetIsFinding) { + return "visible"; + } + + // Finding edges only visible when finding is not hidden + return hiddenNodeIdsRef.current.has(edgeData.sourceId) || + hiddenNodeIdsRef.current.has(edgeData.targetId) + ? 
"hidden" + : "visible"; + }, + ); + + // Auto-adjust view to show the selected node and its findings + setTimeout(() => { + if ( + svgSelectionRef.current && + zoomBehaviorRef.current && + containerRef.current && + svgRef.current + ) { + // Calculate bounding box of visible nodes (clicked node + its findings) + const visibleNodeIds = new Set([ + node.id, + ...Array.from(connectedFindings), + ]); + const visibleNodesData = nodesData.filter((n) => + visibleNodeIds.has(n.id), + ); + + if (visibleNodesData.length > 0) { + // Find min/max coordinates of visible nodes + let minX = Infinity, + maxX = -Infinity, + minY = Infinity, + maxY = -Infinity; + visibleNodesData.forEach((n) => { + minX = Math.min(minX, n.x - NODE_WIDTH / 2); + maxX = Math.max(maxX, n.x + NODE_WIDTH / 2); + minY = Math.min(minY, n.y - NODE_HEIGHT / 2); + maxY = Math.max(maxY, n.y + NODE_HEIGHT / 2); + }); + + // Add padding + const padding = 80; + minX -= padding; + maxX += padding; + minY -= padding; + maxY += padding; + + // Get actual SVG dimensions from the DOM + const svgRect = svgRef.current.getBoundingClientRect(); + const fullWidth = svgRect.width; + const fullHeight = svgRect.height; + + const boxWidth = maxX - minX; + const boxHeight = maxY - minY; + const midX = minX + boxWidth / 2; + const midY = minY + boxHeight / 2; + + // Calculate scale to fit all visible nodes + const scale = + 0.9 / Math.max(boxWidth / fullWidth, boxHeight / fullHeight); + const tx = fullWidth / 2 - scale * midX; + const ty = fullHeight / 2 - scale * midY; + + svgSelectionRef.current + .transition() + .duration(500) + .call( + zoomBehaviorRef.current.transform, + zoomIdentity.translate(tx, ty).scale(scale), + ); + } + } + }, 50); + } + + onNodeClickRef.current?.(d.data); + }); + + // Add tooltip + nodeElements.append("title").text((d: (typeof nodesData)[0]): string => { + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const label = + d.data.labels && d.data.labels.length > 0 + ? formatNodeLabel(d.data.labels[0]) + : d.id; + + if (isFinding) { + return `${label}\nClick to view finding details`; + } else { + return `${label}\nClick to view related findings`; + } + }); + + // Build a set of resource nodes that have findings connected to them + const resourcesWithFindings = new Set(); + data.edges?.forEach((edge) => { + const sourceId = + typeof edge.source === "string" + ? edge.source + : (edge.source as GraphNode).id; + const targetId = + typeof edge.target === "string" + ? 
edge.target + : (edge.target as GraphNode).id; + + const sourceNode = nodeDataMap.get(sourceId); + const targetNode = nodeDataMap.get(targetId); + + const sourceIsFinding = sourceNode?.labels.some((l) => + l.toLowerCase().includes("finding"), + ); + const targetIsFinding = targetNode?.labels.some((l) => + l.toLowerCase().includes("finding"), + ); + + // If one end is a finding, the other is a resource with findings + if (sourceIsFinding && !targetIsFinding) { + resourcesWithFindings.add(targetId); + } + if (targetIsFinding && !sourceIsFinding) { + resourcesWithFindings.add(sourceId); + } + }); + + // Store in ref for use in selection updates + resourcesWithFindingsRef.current = resourcesWithFindings; + + // Add shapes - hexagons for findings, rounded pill shapes for resources + nodeElements.each(function (d) { + const group = select(this); + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + const nodeColor = getNodeColor(d.data.labels, d.data.properties); + const borderColor = getNodeBorderColor(d.data.labels, d.data.properties); + const hasFindings = resourcesWithFindings.has(d.id); + + if (isFinding) { + // Hexagon for findings - always has glow + const w = HEXAGON_WIDTH; + const h = HEXAGON_HEIGHT; + const sideInset = w * 0.15; + const hexPath = ` + M ${-w / 2 + sideInset} ${-h / 2} + L ${w / 2 - sideInset} ${-h / 2} + L ${w / 2} 0 + L ${w / 2 - sideInset} ${h / 2} + L ${-w / 2 + sideInset} ${h / 2} + L ${-w / 2} 0 + Z + `; + const isSelected = d.id === selectedNodeId; + group + .append("path") + .attr("d", hexPath) + .attr("fill", nodeColor) + .attr("fill-opacity", 0.85) + .attr("stroke", isSelected ? GRAPH_EDGE_HIGHLIGHT_COLOR : borderColor) + .attr("stroke-width", isSelected ? 4 : 2) + .attr("filter", isSelected ? "url(#selectedGlow)" : "url(#glow)") + .attr( + "class", + isSelected ? "node-shape selected-node" : "node-shape", + ); + } else { + // Check if this is an Internet node + const isInternet = d.data.labels.some( + (label) => label.toLowerCase() === "internet", + ); + + const isSelected = d.id === selectedNodeId; + + // Resources with findings get red border and red glow (even when selected) + // Selected nodes get orange border + const strokeColor = hasFindings + ? GRAPH_ALERT_BORDER_COLOR + : isSelected + ? GRAPH_EDGE_HIGHLIGHT_COLOR + : borderColor; + + // Determine filter: selected takes priority, then hasFindings, then default + const nodeFilter = isSelected + ? "url(#selectedGlow)" + : hasFindings + ? "url(#redGlow)" + : "url(#glow)"; + + const nodeClass = isSelected + ? "node-shape selected-node" + : "node-shape"; + + if (isInternet) { + // Globe shape for Internet nodes - larger than regular nodes + const radius = NODE_HEIGHT * 0.8; + + // Main circle + group + .append("circle") + .attr("cx", 0) + .attr("cy", 0) + .attr("r", radius) + .attr("fill", nodeColor) + .attr("fill-opacity", 0.85) + .attr("stroke", strokeColor) + .attr("stroke-width", isSelected ? 4 : hasFindings ? 
2.5 : 1.5) + .attr("filter", nodeFilter) + .attr("class", nodeClass); + + // Horizontal ellipse (equator) + group + .append("ellipse") + .attr("cx", 0) + .attr("cy", 0) + .attr("rx", radius) + .attr("ry", radius * 0.35) + .attr("fill", "none") + .attr("stroke", strokeColor) + .attr("stroke-width", 1) + .attr("stroke-opacity", 0.5); + + // Vertical ellipse (meridian) + group + .append("ellipse") + .attr("cx", 0) + .attr("cy", 0) + .attr("rx", radius * 0.35) + .attr("ry", radius) + .attr("fill", "none") + .attr("stroke", strokeColor) + .attr("stroke-width", 1) + .attr("stroke-opacity", 0.5); + } else { + // Rounded pill shape for other resources + group + .append("rect") + .attr("x", -NODE_WIDTH / 2) + .attr("y", -NODE_HEIGHT / 2) + .attr("width", NODE_WIDTH) + .attr("height", NODE_HEIGHT) + .attr("rx", NODE_RADIUS) + .attr("ry", NODE_RADIUS) + .attr("fill", nodeColor) + .attr("fill-opacity", 0.85) + .attr("stroke", strokeColor) + .attr("stroke-width", isSelected ? 4 : hasFindings ? 2.5 : 1.5) + .attr("filter", nodeFilter) + .attr("class", nodeClass); + } + } + }); + + // Store references for updating selection later + const nodeShapes = nodeElements.selectAll(".node-shape"); + nodeShapesRef.current = nodeShapes as unknown as ReturnType< + typeof select + >; + + // Add label text - white text on all nodes (backgrounds are dark enough) + nodeElements.each(function (d) { + const group = select(this); + const isFinding = d.data.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + + // Create text container - white text with shadow for readability + const textGroup = group + .append("text") + .attr("pointer-events", "none") + .attr("text-anchor", "middle") + .attr("dominant-baseline", "middle") + .attr("fill", "#ffffff") + .style("text-shadow", "0 1px 2px rgba(0,0,0,0.5)"); + + if (isFinding) { + // For findings: show check_title/name (severity is shown by color) + const title = String( + d.data.properties?.check_title || + d.data.properties?.name || + d.data.properties?.id || + "Finding", + ); + const maxChars = 24; + const displayTitle = + title.length > maxChars + ? title.substring(0, maxChars) + "..." + : title; + + textGroup + .append("tspan") + .attr("x", 0) + .attr("font-size", "11px") + .attr("font-weight", "600") + .text(displayTitle); + } else { + // For resources: show name with type below + const name = String( + d.data.properties?.name || + d.data.properties?.id || + (d.data.labels && d.data.labels.length > 0 + ? formatNodeLabel(d.data.labels[0]) + : "Unknown"), + ); + const maxChars = 22; + const displayName = + name.length > maxChars ? name.substring(0, maxChars) + "..." : name; + + // Name + textGroup + .append("tspan") + .attr("x", 0) + .attr("dy", "-0.3em") + .attr("font-size", "11px") + .attr("font-weight", "600") + .text(displayName); + + // Type label - slightly transparent white + const type = + d.data.labels && d.data.labels.length > 0 + ? 
formatNodeLabel(d.data.labels[0]) + : ""; + if (type) { + textGroup + .append("tspan") + .attr("x", 0) + .attr("dy", "1.3em") + .attr("font-size", "9px") + .attr("fill", "rgba(255,255,255,0.8)") + .text(type); + } + } + }); + + // Add zoom behavior + const zoomBehavior = zoom().on( + "zoom", + (event: D3ZoomEvent) => { + const transform = event.transform; + container.attr("transform", transform.toString()); + setZoomLevel(transform.k); + }, + ); + zoomBehaviorRef.current = zoomBehavior; + + svg.call(zoomBehavior); + + // Enable Ctrl + mouse wheel zoom only (disable regular scroll zoom) + svg.on("wheel.zoom", null); + svg.on("dblclick.zoom", null); + + // Custom wheel handler that only zooms when Ctrl is pressed + svg.on("wheel", function (event: WheelEvent) { + if (event.ctrlKey || event.metaKey) { + event.preventDefault(); + const currentTransform = container.attr("transform"); + const k = currentTransform + ? parseFloat(currentTransform.match(/scale\(([^)]+)\)/)?.[1] || "1") + : 1; + const scaleFactor = event.deltaY > 0 ? 0.75 : 1.35; + const newK = Math.max(0.1, Math.min(10, k * scaleFactor)); + + if (zoomBehaviorRef.current && svgSelectionRef.current) { + const svgNode = svgRef.current; + if (svgNode) { + const rect = svgNode.getBoundingClientRect(); + const mouseX = event.clientX - rect.left; + const mouseY = event.clientY - rect.top; + + svgSelectionRef.current + .transition() + .duration(100) + .call(zoomBehaviorRef.current.scaleTo, newK, [mouseX, mouseY]); + } + } + } + }); + + // Auto-fit to screen + setTimeout(() => { + if ( + svgSelectionRef.current && + zoomBehaviorRef.current && + containerRef.current + ) { + const bounds = containerRef.current.node()?.getBBox(); + if (!bounds) return; + + const fullWidth = svgRef.current?.clientWidth || 800; + const fullHeight = svgRef.current?.clientHeight || 500; + + const midX = bounds.x + bounds.width / 2; + const midY = bounds.y + bounds.height / 2; + const scale = + 0.8 / Math.max(bounds.width / fullWidth, bounds.height / fullHeight); + const tx = fullWidth / 2 - scale * midX; + const ty = fullHeight / 2 - scale * midY; + + svgSelectionRef.current.call( + zoomBehaviorRef.current.transform, + zoomIdentity.translate(tx, ty).scale(scale), + ); + } + }, 100); + // D3's imperative rendering model requires controlled re-renders. + // We intentionally only re-render on data/view changes, not on callback refs + // (onNodeClick, selectedNodeId) which would cause unnecessary D3 re-renders. 
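+ // The latest-callback-ref pattern used above, shown in isolation (a sketch
+ // for illustration only; `cbRef` is a hypothetical name, and this component
+ // already implements the same idea via onNodeClickRef):
+ //
+ //   const cbRef = useRef(onNodeClick);
+ //   useEffect(() => {
+ //     cbRef.current = onNodeClick;
+ //   }, [onNodeClick]);
+ //   // ...inside a D3 event handler: cbRef.current?.(node);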
+ // eslint-disable-next-line react-hooks/exhaustive-deps + }, [data, isFilteredView]); + + return ( + + ); +}); + +AttackPathGraphComponent.displayName = "AttackPathGraph"; + +export const AttackPathGraph = AttackPathGraphComponent; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-controls.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-controls.tsx new file mode 100644 index 0000000000..872cd57445 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-controls.tsx @@ -0,0 +1,93 @@ +"use client"; + +import { Download, Minimize2, ZoomIn, ZoomOut } from "lucide-react"; + +import { Button } from "@/components/shadcn"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/shadcn/tooltip"; + +interface GraphControlsProps { + onZoomIn: () => void; + onZoomOut: () => void; + onFitToScreen: () => void; + onExport: () => void; +} + +/** + * Controls for graph visualization (zoom, pan, export) + * Positioned as floating toolbar above graph + */ +export const GraphControls = ({ + onZoomIn, + onZoomOut, + onFitToScreen, + onExport, +}: GraphControlsProps) => { + return ( +
+
+ + + + + + Zoom in + + + + + + + Zoom out + + + + + + + Fit graph to view + + + + + + + Export graph + + +
+
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-legend.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-legend.tsx new file mode 100644 index 0000000000..f9c570cc89 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-legend.tsx @@ -0,0 +1,508 @@ +"use client"; + +import { Card, CardContent } from "@/components/shadcn"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/shadcn/tooltip"; +import type { AttackPathGraphData } from "@/types/attack-paths"; + +import { + getNodeBorderColor, + getNodeColor, + GRAPH_EDGE_COLOR, + GRAPH_NODE_BORDER_COLORS, + GRAPH_NODE_COLORS, +} from "../../_lib/graph-colors"; + +interface LegendItem { + label: string; + color: string; + borderColor: string; + description: string; + shape: "rectangle" | "hexagon" | "cloud"; +} + +// Map node labels to human-readable names and descriptions +const nodeTypeDescriptions: Record< + string, + { name: string; description: string } +> = { + // Findings + ProwlerFinding: { + name: "Finding", + description: "Security findings from Prowler scans", + }, + // AWS Account + AWSAccount: { + name: "AWS Account", + description: "AWS account root node", + }, + // Compute + EC2Instance: { + name: "EC2 Instance", + description: "Elastic Compute Cloud instance", + }, + LambdaFunction: { + name: "Lambda Function", + description: "AWS Lambda serverless function", + }, + // Storage + S3Bucket: { + name: "S3 Bucket", + description: "Simple Storage Service bucket", + }, + // IAM + IAMRole: { + name: "IAM Role", + description: "Identity and Access Management role", + }, + IAMPolicy: { + name: "IAM Policy", + description: "Identity and Access Management policy", + }, + AWSRole: { + name: "AWS Role", + description: "AWS IAM role", + }, + AWSPolicy: { + name: "AWS Policy", + description: "AWS IAM policy", + }, + AWSInlinePolicy: { + name: "AWS Inline Policy", + description: "AWS IAM inline policy", + }, + AWSPolicyStatement: { + name: "AWS Policy Statement", + description: "AWS IAM policy statement", + }, + AWSPrincipal: { + name: "AWS Principal", + description: "AWS IAM principal entity", + }, + // Networking + SecurityGroup: { + name: "Security Group", + description: "AWS security group for network access control", + }, + EC2SecurityGroup: { + name: "EC2 Security Group", + description: "EC2 security group for network access control", + }, + IpPermissionInbound: { + name: "IP Permission Inbound", + description: "Inbound IP permission rule", + }, + IpRule: { + name: "IP Rule", + description: "IP address rule", + }, + Internet: { + name: "Internet", + description: "Internet gateway or public access", + }, + // Tags + AWSTag: { + name: "AWS Tag", + description: "AWS resource tag", + }, + Tag: { + name: "Tag", + description: "Resource tag", + }, +}; + +/** + * Extract unique node types from graph data + */ +function extractNodeTypes( + nodes: AttackPathGraphData["nodes"] | undefined, +): string[] { + if (!nodes) return []; + + const nodeTypes = new Set(); + nodes.forEach((node) => { + node.labels.forEach((label) => { + nodeTypes.add(label); + }); + }); + + return Array.from(nodeTypes).sort(); +} + +/** + * Severity legend items - colors work in both light and dark themes + */ +const severityLegendItems: LegendItem[] = [ + { + label: "Critical", + color: GRAPH_NODE_COLORS.critical, + borderColor: GRAPH_NODE_BORDER_COLORS.critical, + description: "Critical severity 
finding", + shape: "hexagon", + }, + { + label: "High", + color: GRAPH_NODE_COLORS.high, + borderColor: GRAPH_NODE_BORDER_COLORS.high, + description: "High severity finding", + shape: "hexagon", + }, + { + label: "Medium", + color: GRAPH_NODE_COLORS.medium, + borderColor: GRAPH_NODE_BORDER_COLORS.medium, + description: "Medium severity finding", + shape: "hexagon", + }, + { + label: "Low", + color: GRAPH_NODE_COLORS.low, + borderColor: GRAPH_NODE_BORDER_COLORS.low, + description: "Low severity finding", + shape: "hexagon", + }, +]; + +/** + * Generate legend items from graph data + */ +function generateLegendItems( + nodeTypes: string[], + hasFindings: boolean, +): LegendItem[] { + const items: LegendItem[] = []; + const seenTypes = new Set(); + + // Add severity items if there are findings + if (hasFindings) { + items.push(...severityLegendItems); + } + + // Helper to format unknown node types (e.g., "AWSPolicyStatement" -> "AWS Policy Statement") + const formatNodeTypeName = (nodeType: string): string => { + return nodeType + .replace(/([A-Z])/g, " $1") // Add space before capitals + .replace(/^ /, "") // Remove leading space + .replace(/AWS /g, "AWS ") // Keep AWS together + .replace(/EC2 /g, "EC2 ") // Keep EC2 together + .replace(/S3 /g, "S3 ") // Keep S3 together + .replace(/IAM /g, "IAM ") // Keep IAM together + .replace(/IP /g, "IP ") // Keep IP together + .trim(); + }; + + nodeTypes.forEach((nodeType) => { + if (seenTypes.has(nodeType)) return; + seenTypes.add(nodeType); + + // Skip findings - we show severity colors instead + const isFinding = nodeType.toLowerCase().includes("finding"); + if (isFinding) return; + + const description = nodeTypeDescriptions[nodeType]; + + // Determine shape based on node type + const isInternet = nodeType.toLowerCase() === "internet"; + const shape: "rectangle" | "hexagon" | "cloud" = isInternet + ? 
"cloud" + : "rectangle"; + + if (description) { + items.push({ + label: description.name, + color: getNodeColor([nodeType]), + borderColor: getNodeBorderColor([nodeType]), + description: description.description, + shape, + }); + } else { + // Format unknown node types nicely + const formattedName = formatNodeTypeName(nodeType); + items.push({ + label: formattedName, + color: getNodeColor([nodeType]), + borderColor: getNodeBorderColor([nodeType]), + description: `${formattedName} node`, + shape, + }); + } + }); + + return items; +} + +/** + * Hexagon shape component for legend + */ +const HexagonShape = ({ + color, + borderColor, +}: { + color: string; + borderColor: string; +}) => ( + +); + +/** + * Pill shape component for legend + */ +const PillShape = ({ + color, + borderColor, +}: { + color: string; + borderColor: string; +}) => ( + +); + +/** + * Globe shape component for legend (used for Internet nodes) + */ +const GlobeShape = ({ + color, + borderColor, +}: { + color: string; + borderColor: string; +}) => ( + +); + +/** + * Edge line component for legend + */ +const EdgeLine = ({ dashed }: { dashed: boolean }) => ( + +); + +interface GraphLegendProps { + data?: AttackPathGraphData; +} + +/** + * Legend for attack path graph node types and edge styles + */ +export const GraphLegend = ({ data }: GraphLegendProps) => { + const nodeTypes = extractNodeTypes(data?.nodes); + + // Check if there are any findings in the data + const hasFindings = nodeTypes.some((type) => + type.toLowerCase().includes("finding"), + ); + + const legendItems = generateLegendItems(nodeTypes, hasFindings); + + if (legendItems.length === 0) { + return null; + } + + return ( + + +
+ {/* Node types section (markup elided): for each legend item, its shape
+    (HexagonShape, GlobeShape for "cloud", or PillShape) and {item.label},
+    wrapped in a Tooltip whose content is {item.description} */}
+ {/* Edge types section (markup elided): a solid EdgeLine labelled
+    "Resource Connection" ("Connection between infrastructure resources");
+    when hasFindings, a dashed EdgeLine labelled "Finding Connection"
+    ("Connection to a security finding") */}
+ {/* Zoom control hint (markup elided): "Ctrl" + "Scroll to zoom" */}
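+ {/* How the legend derives from data, sketched (illustrative only; the
+    node below is hypothetical):
+    extractNodeTypes([{ id: "i-1", labels: ["EC2Instance"], properties: {} }])
+    returns ["EC2Instance"], and generateLegendItems(["EC2Instance"], false)
+    yields one rectangle item labelled "EC2 Instance" */}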
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-loading.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-loading.tsx new file mode 100644 index 0000000000..cf56231a05 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/graph-loading.tsx @@ -0,0 +1,24 @@ +"use client"; + +import { Skeleton } from "@/components/shadcn/skeleton/skeleton"; + +/** + * Loading skeleton for graph visualization + * Shows while graph data is being fetched and processed + */ +export const GraphLoading = () => { + return ( +
+ {/* Loading layout (markup elided): three Skeleton bars and the text
+    "Loading Attack Paths graph..." */}
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/index.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/index.ts new file mode 100644 index 0000000000..ae529f31cd --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/graph/index.ts @@ -0,0 +1,5 @@ +export type { AttackPathGraphRef } from "./attack-path-graph"; +export { AttackPathGraph } from "./attack-path-graph"; +export { GraphControls } from "./graph-controls"; +export { GraphLegend } from "./graph-legend"; +export { GraphLoading } from "./graph-loading"; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/index.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/index.ts new file mode 100644 index 0000000000..eac86fccc7 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/index.ts @@ -0,0 +1,7 @@ +export { ExecuteButton } from "./execute-button"; +export * from "./graph"; +export * from "./node-detail"; +export { QueryParametersForm } from "./query-parameters-form"; +export { QuerySelector } from "./query-selector"; +export { ScanListTable } from "./scan-list-table"; +export { ScanStatusBadge } from "./scan-status-badge"; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/index.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/index.ts new file mode 100644 index 0000000000..c5895fe51f --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/index.ts @@ -0,0 +1,4 @@ +export { NodeDetailContent, NodeDetailPanel } from "./node-detail-panel"; +export { NodeOverview } from "./node-overview"; +export { NodeRelationships } from "./node-relationships"; +export { NodeRemediation } from "./node-remediation"; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-detail-panel.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-detail-panel.tsx new file mode 100644 index 0000000000..4d8885e65c --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-detail-panel.tsx @@ -0,0 +1,132 @@ +"use client"; + +import { Button, Card, CardContent } from "@/components/shadcn"; +import { + Sheet, + SheetContent, + SheetDescription, + SheetHeader, + SheetTitle, +} from "@/components/ui/sheet/sheet"; +import type { GraphNode } from "@/types/attack-paths"; + +import { NodeFindings } from "./node-findings"; +import { NodeOverview } from "./node-overview"; +import { NodeResources } from "./node-resources"; + +interface NodeDetailPanelProps { + node: GraphNode | null; + allNodes?: GraphNode[]; + onClose?: () => void; +} + +/** + * Node details content component (reusable) + */ +export const NodeDetailContent = ({ + node, + allNodes = [], +}: { + node: GraphNode; + allNodes?: GraphNode[]; +}) => { + const isProwlerFinding = node?.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + + return ( +
+ {/* Node Overview Section (markup elided): Card titled "Node Overview"
+    rendering NodeOverview for the node */}
+ {/* Related Findings Section - only for non-Finding nodes (markup
+    elided): Card titled "Related Findings" with the hint "Findings
+    connected to this node", rendering NodeFindings */}
+ {/* Affected Resources Section - only for Finding nodes (markup elided):
+    Card titled "Affected Resources" with the hint "Resources affected by
+    this finding", rendering NodeResources */}
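+ {/* The section switch above hinges on one label test (illustrative
+    sketch): labels.some((l) => l.toLowerCase().includes("finding")),
+    so ["ProwlerFinding"] shows "Affected Resources" while
+    ["EC2Instance"] shows "Related Findings" */}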
+ ); +}; + +/** + * Right-side sheet panel for node details + * Shows comprehensive information about selected graph node + * Uses shadcn Sheet component for sliding panel from right + */ +export const NodeDetailPanel = ({ + node, + allNodes = [], + onClose, +}: NodeDetailPanelProps) => { + const isOpen = node !== null; + + const isProwlerFinding = node?.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + + return ( + !open && onClose?.()}> + + +
+ {/* Sheet content (markup elided): header "Node Details" with the node
+    name, or the first 20 characters of its id, via
+    String(node?.properties?.name || node?.id.substring(0, 20)); for
+    finding nodes, a Button linking to the full finding; the body renders
+    NodeDetailContent when a node is selected */}
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-findings.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-findings.tsx new file mode 100644 index 0000000000..bb424a818c --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-findings.tsx @@ -0,0 +1,102 @@ +"use client"; + +import { SeverityBadge } from "@/components/ui/table/severity-badge"; +import type { GraphNode } from "@/types/attack-paths"; + +const SEVERITY_LEVELS = { + informational: "informational", + low: "low", + medium: "medium", + high: "high", + critical: "critical", +} as const; + +type Severity = (typeof SEVERITY_LEVELS)[keyof typeof SEVERITY_LEVELS]; + +interface NodeFindingsProps { + node: GraphNode; + allNodes?: GraphNode[]; +} + +/** + * Node findings section showing related findings for the selected node + * Displays findings that are connected to the node via HAS_FINDING edges + */ +export const NodeFindings = ({ node, allNodes = [] }: NodeFindingsProps) => { + // Get finding IDs from the node's findings array (populated by adapter) + const findingIds = node.findings || []; + + // Get the actual finding nodes + const findingNodes = allNodes.filter((n) => findingIds.includes(n.id)); + + if (findingNodes.length === 0) { + return null; + } + + const normalizeSeverity = ( + severity?: string | number | boolean | string[] | number[] | null, + ): Severity => { + const sev = String( + Array.isArray(severity) ? severity[0] : severity || "", + ).toLowerCase(); + if (sev in SEVERITY_LEVELS) { + return sev as Severity; + } + return "informational"; + }; + + return ( +
+ {findingNodes.map((finding) => {
+   // Get the finding name (check_title preferred, then name)
+   const findingName = String(
+     finding.properties?.check_title ||
+       finding.properties?.name ||
+       finding.properties?.finding_id ||
+       "Unknown Finding",
+   );
+   // Use properties.id for display, fallback to graph node id
+   const findingId = String(finding.properties?.id || finding.id);
+   return (
+     null /* finding card (markup elided): a SeverityBadge when severity is
+             set, the finding name, "ID: {findingId}", a
+             "View Full Finding →" link, and the description when present */
+   );
+ })}
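+ {/* normalizeSeverity by example (illustrative only):
+    normalizeSeverity("HIGH") -> "high";
+    normalizeSeverity(["critical", "low"]) -> "critical" (first element wins);
+    normalizeSeverity(undefined) -> "informational" (fallback) */}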
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-overview.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-overview.tsx new file mode 100644 index 0000000000..e614ce521a --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-overview.tsx @@ -0,0 +1,109 @@ +"use client"; + +import { CodeSnippet } from "@/components/ui/code-snippet/code-snippet"; +import { InfoField } from "@/components/ui/entities"; +import { DateWithTime } from "@/components/ui/entities/date-with-time"; +import type { GraphNode, GraphNodePropertyValue } from "@/types/attack-paths"; + +import { formatNodeLabels } from "../../_lib"; + +interface NodeOverviewProps { + node: GraphNode; +} + +/** + * Node overview section showing basic node information + */ +export const NodeOverview = ({ node }: NodeOverviewProps) => { + const renderValue = (value: GraphNodePropertyValue) => { + if (value === null || value === undefined || value === "") { + return "-"; + } + if (Array.isArray(value)) { + return value.join(", "); + } + return String(value); + }; + + const isFinding = node.labels.some((label) => + label.toLowerCase().includes("finding"), + ); + + return ( +
+ {/* Header (markup elided): the formatted node labels from
+    formatNodeLabels(node.labels); for findings, also the check_title and a
+    CodeSnippet with the finding id */}
+ {/* Display all properties */}
+ {/* "Properties" heading (markup elided) */}
+ {Object.entries(node.properties).map(([key, value]) => {
+   // Skip internal properties
+   if (key.startsWith("_")) {
+     return null;
+   }
+   // Skip check_title and id for findings as they're shown prominently above
+   if (isFinding && (key === "check_title" || key === "id")) {
+     return null;
+   }
+   // Heuristic for timestamp-like keys. Matching the bare substring "at"
+   // would also catch keys such as "state" or "path", so match the "_at"
+   // suffix instead.
+   const isTimestamp =
+     key.includes("date") ||
+     key.includes("time") ||
+     key.endsWith("_at") ||
+     key.includes("seen");
+   return (
+     {isTimestamp && typeof value === "number" ? (
+       null /* DateWithTime for numeric epoch values (markup elided) */
+     ) : isTimestamp &&
+       typeof value === "string" &&
+       value.match(/^\d+$/) ? (
+       null /* DateWithTime for digit-string values (markup elided) */
+     ) : typeof value === "object" ? (
+       {JSON.stringify(value).substring(0, 50)}...
+     ) : (
+       renderValue(value)
+     )}
+   );
+ })}
+
+
+ ); +}; + +// Helper function to format property names +function formatPropertyName(name: string): string { + return name + .replace(/([A-Z])/g, " $1") + .replace(/_/g, " ") + .replace(/\b\w/g, (l) => l.toUpperCase()) + .trim(); +} diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-relationships.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-relationships.tsx new file mode 100644 index 0000000000..7370204990 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-relationships.tsx @@ -0,0 +1,105 @@ +"use client"; + +import { cn } from "@/lib/utils"; +import type { GraphEdge } from "@/types/attack-paths"; + +interface NodeRelationshipsProps { + incomingEdges: GraphEdge[]; + outgoingEdges: GraphEdge[]; +} + +/** + * Format edge type to human-readable label + * e.g., "HAS_FINDING" -> "Has Finding" + */ +function formatEdgeType(edgeType: string): string { + return edgeType + .split("_") + .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) + .join(" "); +} + +interface EdgeItemProps { + edge: GraphEdge; + isOutgoing: boolean; +} + +/** + * Reusable edge item component + */ +function EdgeItem({ edge, isOutgoing }: EdgeItemProps) { + const targetId = + typeof edge.target === "string" ? edge.target : String(edge.target); + const sourceId = + typeof edge.source === "string" ? edge.source : String(edge.source); + const displayId = (isOutgoing ? targetId : sourceId).substring(0, 30); + + return ( +
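+ {/* The formatting helpers above, by example (illustrative only):
+    formatEdgeType("HAS_FINDING") -> "Has Finding";
+    formatPropertyName("last_seen_at") -> "Last Seen At" */}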
+ + {displayId} + + + {formatEdgeType(edge.type)} + +
+ ); +} + +/** + * Node relationships section showing incoming and outgoing edges + */ +export const NodeRelationships = ({ + incomingEdges, + outgoingEdges, +}: NodeRelationshipsProps) => { + return ( +
+ {/* Outgoing Relationships section (markup elided): heading
+    "Outgoing Relationships ({outgoingEdges.length})"; renders an EdgeItem
+    per edge with isOutgoing set, or "No outgoing relationships" when
+    empty */}
+ {/* Incoming Relationships section (markup elided): heading
+    "Incoming Relationships ({incomingEdges.length})"; renders an EdgeItem
+    per edge, or "No incoming relationships" when empty */}
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-remediation.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-remediation.tsx new file mode 100644 index 0000000000..e3421e5a43 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-remediation.tsx @@ -0,0 +1,83 @@ +"use client"; + +import Link from "next/link"; + +import { Badge } from "@/components/shadcn/badge/badge"; + +interface Finding { + id: string; + title: string; + severity: "critical" | "high" | "medium" | "low" | "info"; + status: "PASS" | "FAIL" | "MANUAL"; +} + +interface NodeRemediationProps { + findings: Finding[]; +} + +/** + * Node remediation section showing related Prowler findings + */ +export const NodeRemediation = ({ findings }: NodeRemediationProps) => { + const getSeverityVariant = (severity: string) => { + switch (severity) { + case "critical": + return "destructive"; + case "high": + return "default"; + case "medium": + return "secondary"; + case "low": + return "outline"; + default: + return "default"; + } + }; + + const getStatusVariant = (status: string) => { + if (status === "PASS") return "default"; + if (status === "FAIL") return "destructive"; + return "secondary"; + }; + + return ( +
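+ {/* Variant mapping by example (illustrative only):
+    getSeverityVariant("critical") -> "destructive";
+    getSeverityVariant("low") -> "outline";
+    getStatusVariant("FAIL") -> "destructive" */}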
+ {findings.map((finding) => (
+   null /* finding row (markup elided): {finding.title};
+           "ID: {finding.id.substring(0, 12)}..."; a Badge with
+           getSeverityVariant(finding.severity) showing {finding.severity};
+           a Badge with getStatusVariant(finding.status) showing
+           {finding.status}; and a "View Full Finding →" Link */
+ ))}
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-resources.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-resources.tsx new file mode 100644 index 0000000000..47f1f8db5f --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/node-detail/node-resources.tsx @@ -0,0 +1,85 @@ +"use client"; + +import { Badge } from "@/components/shadcn/badge/badge"; +import { cn } from "@/lib/utils"; +import type { GraphNode } from "@/types/attack-paths"; + +interface NodeResourcesProps { + node: GraphNode; + allNodes?: GraphNode[]; +} + +/** + * Node resources section showing affected resources for the selected finding node + * Displays resources that are connected to the finding node via HAS_FINDING edges + */ +export const NodeResources = ({ node, allNodes = [] }: NodeResourcesProps) => { + // Get resource IDs from the node's resources array (populated by adapter) + const resourceIds = node.resources || []; + + // Get the actual resource nodes + const resourceNodes = allNodes.filter((n) => resourceIds.includes(n.id)); + + if (resourceNodes.length === 0) { + return null; + } + + const getResourceTypeColor = (labels: string[]): string => { + const label = (labels[0] || "").toLowerCase(); + switch (label) { + case "s3bucket": + case "awsaccount": + case "ec2instance": + case "iamrole": + case "lambdafunction": + case "securitygroup": + return "bg-bg-data-aws"; + default: + return "bg-bg-data-muted"; + } + }; + + return ( +
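+ {/* getResourceTypeColor by example (illustrative only): the known AWS
+    labels share one background token, anything else falls back:
+    getResourceTypeColor(["S3Bucket"]) -> "bg-bg-data-aws";
+    getResourceTypeColor(["SomeOtherNode"]) -> "bg-bg-data-muted" */}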
    + {resourceNodes.map((resource) => { + // Use properties.id for display, fallback to graph node id + const resourceId = String(resource.properties?.id || resource.id); + + return ( +
+ null /* resource card (markup elided): a Badge with the resource's first
+         label, coloured via getResourceTypeColor(resource.labels); the
+         resource name (or id); "ID: {resourceId}"; and the ARN when
+         present */
+ );
+ })}
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-parameters-form.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-parameters-form.tsx new file mode 100644 index 0000000000..ccbcf60547 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-parameters-form.tsx @@ -0,0 +1,122 @@ +"use client"; + +import { Controller, useFormContext } from "react-hook-form"; + +import type { AttackPathQuery } from "@/types/attack-paths"; + +interface QueryParametersFormProps { + selectedQuery: AttackPathQuery | null | undefined; +} + +/** + * Dynamic form component for query parameters + * Renders form fields based on selected query's parameters + */ +export const QueryParametersForm = ({ + selectedQuery, +}: QueryParametersFormProps) => { + const { + control, + formState: { errors }, + } = useFormContext(); + + if (!selectedQuery || !selectedQuery.attributes.parameters.length) { + return ( +
+ {/* Empty state (markup elided): 'This query requires no parameters.
+    Click "Execute Query" to proceed.' */}
+ ); + } + + return ( +
+ {/* "Query Parameters" heading (markup elided) */}
+ + {selectedQuery.attributes.parameters.map((param) => ( + { + if (param.data_type === "boolean") { + return ( +
+ +
+ ); + } + + const errorMessage = (() => { + const error = errors[param.name]; + if (error && typeof error.message === "string") { + return error.message; + } + return undefined; + })(); + + const descriptionId = `${param.name}-description`; + return ( +
+ + + {param.description && ( + + {param.description} + + )} + {errorMessage && ( + {errorMessage} + )} +
+ ); + }} + /> + ))} +
+ ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-selector.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-selector.tsx new file mode 100644 index 0000000000..65acb8a84a --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/query-selector.tsx @@ -0,0 +1,46 @@ +"use client"; + +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/shadcn"; +import type { AttackPathQuery } from "@/types/attack-paths"; + +interface QuerySelectorProps { + queries: AttackPathQuery[]; + selectedQueryId: string | null; + onQueryChange: (queryId: string) => void; +} + +/** + * Query selector dropdown component + * Allows users to select from available Attack Paths queries + */ +export const QuerySelector = ({ + queries, + selectedQueryId, + onQueryChange, +}: QuerySelectorProps) => { + return ( + + ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-list-table.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-list-table.tsx new file mode 100644 index 0000000000..8e10305617 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-list-table.tsx @@ -0,0 +1,350 @@ +"use client"; + +import { + ChevronLeftIcon, + ChevronRightIcon, + DoubleArrowLeftIcon, + DoubleArrowRightIcon, +} from "@radix-ui/react-icons"; +import Link from "next/link"; +import { usePathname, useRouter, useSearchParams } from "next/navigation"; +import { useState } from "react"; + +import { Button } from "@/components/shadcn/button/button"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/shadcn/select/select"; +import { DateWithTime } from "@/components/ui/entities/date-with-time"; +import { EntityInfo } from "@/components/ui/entities/entity-info"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { cn } from "@/lib/utils"; +import type { ProviderType } from "@/types"; +import type { AttackPathScan } from "@/types/attack-paths"; +import { SCAN_STATES } from "@/types/attack-paths"; + +import { ScanStatusBadge } from "./scan-status-badge"; + +interface ScanListTableProps { + scans: AttackPathScan[]; +} + +const TABLE_COLUMN_COUNT = 6; +const DEFAULT_PAGE_SIZE = 5; +const PAGE_SIZE_OPTIONS = [2, 5, 10, 15]; + +const baseLinkClass = + "relative block rounded border-0 bg-transparent px-3 py-1.5 text-button-primary outline-none transition-all duration-300 hover:bg-bg-neutral-tertiary hover:text-text-neutral-primary focus:shadow-none dark:hover:bg-bg-neutral-secondary dark:hover:text-text-neutral-primary"; + +const disabledLinkClass = + "text-border-neutral-secondary dark:text-border-neutral-secondary hover:bg-transparent hover:text-border-neutral-secondary dark:hover:text-border-neutral-secondary cursor-default pointer-events-none"; + +/** + * Table displaying AWS account Attack Paths scans + * Shows scan metadata and allows selection of completed scans + */ +export const ScanListTable = ({ scans }: ScanListTableProps) => { + const pathname = usePathname(); + const searchParams = useSearchParams(); + const router = useRouter(); + + const selectedScanId = searchParams.get("scanId"); + const currentPage = parseInt(searchParams.get("scanPage") ?? "1"); + const pageSize = parseInt( + searchParams.get("scanPageSize") ?? 
String(DEFAULT_PAGE_SIZE), + ); + const [selectedPageSize, setSelectedPageSize] = useState(String(pageSize)); + + const totalPages = Math.ceil(scans.length / pageSize); + const startIndex = (currentPage - 1) * pageSize; + const endIndex = startIndex + pageSize; + const paginatedScans = scans.slice(startIndex, endIndex); + + const handleSelectScan = (scanId: string) => { + const params = new URLSearchParams(searchParams); + params.set("scanId", scanId); + router.push(`${pathname}?${params.toString()}`); + }; + + const isSelectDisabled = (scan: AttackPathScan) => { + return ( + scan.attributes.state !== SCAN_STATES.COMPLETED || + selectedScanId === scan.id + ); + }; + + const getSelectButtonLabel = (scan: AttackPathScan) => { + if (selectedScanId === scan.id) { + return "Selected"; + } + if (scan.attributes.state === SCAN_STATES.SCHEDULED) { + return "Scheduled"; + } + if (scan.attributes.state === SCAN_STATES.EXECUTING) { + return "Waiting..."; + } + if (scan.attributes.state === SCAN_STATES.FAILED) { + return "Failed"; + } + return "Select"; + }; + + const createPageUrl = (pageNumber: number | string) => { + const params = new URLSearchParams(searchParams); + + // Preserve scanId if it exists + const scanId = searchParams.get("scanId"); + + if (+pageNumber > totalPages) { + return `${pathname}?${params.toString()}`; + } + + params.set("scanPage", pageNumber.toString()); + + // Ensure that scanId is preserved + if (scanId) params.set("scanId", scanId); + + return `${pathname}?${params.toString()}`; + }; + + const isFirstPage = currentPage === 1; + const isLastPage = currentPage === totalPages; + + return ( + <> +
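+ {/* The client-side pagination math above, by example (illustrative
+    only): with 12 scans and pageSize 5, totalPages = Math.ceil(12 / 5) = 3;
+    page 3 slices scans.slice(10, 15), i.e. the last two rows */}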
+ {/* Table (markup elided). Header row: "Provider / Account",
+    "Last Scan Date", "Status", "Progress", "Duration", "Action" */}
+ {scans.length === 0 ? (
+   null /* empty-state row: "No Attack Paths scans available.",
+           presumably spanning TABLE_COLUMN_COUNT columns */
+ ) : (
+   paginatedScans.map((scan) => {
+     const isDisabled = isSelectDisabled(scan);
+     const isSelected = selectedScanId === scan.id;
+     const duration = scan.attributes.duration
+       ? `${Math.floor(scan.attributes.duration / 60)}m ${scan.attributes.duration % 60}s`
+       : "-";
+     return (
+       null /* row (markup elided): EntityInfo for the provider account;
+               completed_at as DateWithTime, or "-"; ScanStatusBadge with
+               the scan state and progress; "{scan.attributes.progress}%";
+               the duration; and a select Button labelled by
+               getSelectButtonLabel(scan), disabled when isDisabled,
+               highlighted when isSelected, wired to
+               handleSelectScan(scan.id) */
+     );
+   })
+ )}
+ {/* Pagination controls (markup elided), shown when scans exist:
+    "{scans.length} scans in total"; when scans.length > DEFAULT_PAGE_SIZE,
+    a "Rows per page" Select over PAGE_SIZE_OPTIONS bound to
+    selectedPageSize, the label "Page {currentPage} of {totalPages}", and
+    first/previous/next/last Links built with createPageUrl(); the arrow
+    Links call e.preventDefault() when isFirstPage / isLastPage */}
+ {/* Footnote: 'Only Attack Paths scans with "Completed" status can be
+    selected. Scans in progress will update automatically.' */}
+ + ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-status-badge.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-status-badge.tsx new file mode 100644 index 0000000000..74c7302126 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_components/scan-status-badge.tsx @@ -0,0 +1,59 @@ +"use client"; + +import { Loader2 } from "lucide-react"; + +import { Badge } from "@/components/shadcn/badge/badge"; +import type { ScanState } from "@/types/attack-paths"; + +interface ScanStatusBadgeProps { + status: ScanState; + progress?: number; +} + +/** + * Status badge for attack path scan status + * Shows visual indicator and text for scan progress + */ +export const ScanStatusBadge = ({ + status, + progress = 0, +}: ScanStatusBadgeProps) => { + if (status === "scheduled") { + return ( + + Scheduled + + ); + } + + if (status === "available") { + return ( + + Queued + + ); + } + + if (status === "executing") { + return ( + + + In Progress ({progress}%) + + ); + } + + if (status === "completed") { + return ( + + Completed + + ); + } + + return ( + + Failed + + ); +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/index.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/index.ts new file mode 100644 index 0000000000..eec8e5f81a --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/index.ts @@ -0,0 +1,3 @@ +export { useGraphState } from "./use-graph-state"; +export { useQueryBuilder } from "./use-query-builder"; +export { useWizardState } from "./use-wizard-state"; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-graph-state.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-graph-state.ts new file mode 100644 index 0000000000..8af36bbcfb --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-graph-state.ts @@ -0,0 +1,182 @@ +"use client"; + +import { create } from "zustand"; + +import type { + AttackPathGraphData, + GraphNode, + GraphState, +} from "@/types/attack-paths"; + +import { computeFilteredSubgraph } from "../_lib"; + +interface FilteredViewState { + isFilteredView: boolean; + filteredNodeId: string | null; + fullData: AttackPathGraphData | null; // Original data before filtering +} + +interface GraphStore extends GraphState, FilteredViewState { + setGraphData: (data: AttackPathGraphData) => void; + setSelectedNodeId: (nodeId: string | null) => void; + setLoading: (loading: boolean) => void; + setError: (error: string | null) => void; + setZoom: (zoomLevel: number) => void; + setPan: (panX: number, panY: number) => void; + setFilteredView: ( + isFiltered: boolean, + nodeId: string | null, + filteredData: AttackPathGraphData | null, + fullData: AttackPathGraphData | null, + ) => void; + reset: () => void; +} + +const initialState: GraphState & FilteredViewState = { + data: null, + selectedNodeId: null, + loading: false, + error: null, + zoomLevel: 1, + panX: 0, + panY: 0, + isFilteredView: false, + filteredNodeId: null, + fullData: null, +}; + +const useGraphStore = create((set) => ({ + ...initialState, + setGraphData: (data) => + set({ + data, + fullData: null, + error: null, + isFilteredView: false, + filteredNodeId: null, + }), + setSelectedNodeId: (nodeId) => set({ selectedNodeId: nodeId }), + setLoading: (loading) => set({ loading }), + setError: (error) => set({ error }), + setZoom: (zoomLevel) => set({ zoomLevel }), + setPan: (panX, panY) => 
set({ panX, panY }), + setFilteredView: (isFiltered, nodeId, filteredData, fullData) => + set({ + isFilteredView: isFiltered, + filteredNodeId: nodeId, + data: filteredData, + fullData, + selectedNodeId: nodeId, + }), + reset: () => set(initialState), +})); + +/** + * Custom hook for managing graph visualization state + * Handles graph data, node selection, zoom/pan, loading states, and filtered view + */ +export const useGraphState = () => { + const store = useGraphStore(); + + // Zustand store methods are stable, no need to memoize + const updateGraphData = (data: AttackPathGraphData) => { + store.setGraphData(data); + }; + + const selectNode = (nodeId: string | null) => { + store.setSelectedNodeId(nodeId); + }; + + const getSelectedNode = (): GraphNode | null => { + if (!store.data?.nodes || !store.selectedNodeId) return null; + return ( + store.data.nodes.find((node) => node.id === store.selectedNodeId) || null + ); + }; + + const startLoading = () => { + store.setLoading(true); + }; + + const stopLoading = () => { + store.setLoading(false); + }; + + const setError = (error: string | null) => { + store.setError(error); + }; + + const updateZoomAndPan = (zoomLevel: number, panX: number, panY: number) => { + store.setZoom(zoomLevel); + store.setPan(panX, panY); + }; + + const resetGraph = () => { + store.reset(); + }; + + const clearGraph = () => { + store.setGraphData({ nodes: [], edges: [] }); + store.setSelectedNodeId(null); + store.setFilteredView(false, null, null, null); + }; + + /** + * Enter filtered view mode - redraws graph with only the selected path + * Stores full data so we can restore it when exiting filtered view + */ + const enterFilteredView = (nodeId: string) => { + if (!store.data) return; + + // Use fullData if we're already in filtered view, otherwise use current data + const sourceData = store.fullData || store.data; + const filteredData = computeFilteredSubgraph(sourceData, nodeId); + store.setFilteredView(true, nodeId, filteredData, sourceData); + }; + + /** + * Exit filtered view mode - restore full graph data + */ + const exitFilteredView = () => { + if (!store.isFilteredView || !store.fullData) return; + store.setFilteredView(false, null, store.fullData, null); + }; + + /** + * Get the node that was used to filter the view + */ + const getFilteredNode = (): GraphNode | null => { + if (!store.isFilteredView || !store.filteredNodeId) return null; + // Look in fullData since that's where the original node data is + const sourceData = store.fullData || store.data; + if (!sourceData) return null; + return ( + sourceData.nodes.find((node) => node.id === store.filteredNodeId) || null + ); + }; + + return { + data: store.data, + fullData: store.fullData, + selectedNodeId: store.selectedNodeId, + selectedNode: getSelectedNode(), + loading: store.loading, + error: store.error, + zoomLevel: store.zoomLevel, + panX: store.panX, + panY: store.panY, + isFilteredView: store.isFilteredView, + filteredNodeId: store.filteredNodeId, + filteredNode: getFilteredNode(), + updateGraphData, + selectNode, + startLoading, + stopLoading, + setError, + updateZoomAndPan, + resetGraph, + clearGraph, + enterFilteredView, + exitFilteredView, + }; +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-query-builder.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-query-builder.ts new file mode 100644 index 0000000000..b05c9d463c --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-query-builder.ts @@ -0,0 +1,98 
@@ +"use client"; + +import { zodResolver } from "@hookform/resolvers/zod"; +import { useEffect, useState } from "react"; +import { useForm } from "react-hook-form"; +import { z } from "zod"; + +import type { AttackPathQuery } from "@/types/attack-paths"; + +/** + * Custom hook for managing query builder form state + * Handles query selection, parameter validation, and form submission + */ +export const useQueryBuilder = (availableQueries: AttackPathQuery[]) => { + const [selectedQuery, setSelectedQuery] = useState(null); + + // Generate dynamic Zod schema based on selected query parameters + const getValidationSchema = (queryId: string | null) => { + const schemaObject: Record = {}; + + if (queryId) { + const query = availableQueries.find((q) => q.id === queryId); + + if (query) { + query.attributes.parameters.forEach((param) => { + let fieldSchema: z.ZodTypeAny = z + .string() + .min(1, `${param.label} is required`); + + if (param.data_type === "number") { + fieldSchema = z.coerce.number().refine((val) => val >= 0, { + message: `${param.label} must be a non-negative number`, + }); + } else if (param.data_type === "boolean") { + fieldSchema = z.boolean().default(false); + } + + schemaObject[param.name] = fieldSchema; + }); + } + } + + return z.object(schemaObject); + }; + + const getDefaultValues = (queryId: string | null) => { + const defaults: Record = {}; + + const query = availableQueries.find((q) => q.id === queryId); + if (query) { + query.attributes.parameters.forEach((param) => { + defaults[param.name] = param.data_type === "boolean" ? false : ""; + }); + } + + return defaults; + }; + + const form = useForm({ + resolver: zodResolver(getValidationSchema(selectedQuery)), + mode: "onChange", + defaultValues: getDefaultValues(selectedQuery), + }); + + // Update form when selectedQuery changes + useEffect(() => { + form.reset(getDefaultValues(selectedQuery), { + keepDirtyValues: false, + }); + }, [selectedQuery]); // eslint-disable-line react-hooks/exhaustive-deps + + const selectedQueryData = availableQueries.find( + (q) => q.id === selectedQuery, + ); + + const handleQueryChange = (queryId: string) => { + setSelectedQuery(queryId); + form.reset(); + }; + + const getQueryParameters = () => { + return form.getValues(); + }; + + const isFormValid = () => { + return form.formState.isValid; + }; + + return { + selectedQuery, + selectedQueryData, + availableQueries, + form, + handleQueryChange, + getQueryParameters, + isFormValid, + }; +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-wizard-state.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-wizard-state.ts new file mode 100644 index 0000000000..787cb56617 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_hooks/use-wizard-state.ts @@ -0,0 +1,91 @@ +"use client"; + +import { useRouter } from "next/navigation"; +import { useCallback } from "react"; +import { create } from "zustand"; + +import type { WizardState } from "@/types/attack-paths"; + +interface WizardStore extends WizardState { + setCurrentStep: (step: 1 | 2) => void; + setSelectedScanId: (scanId: string) => void; + setSelectedQuery: (queryId: string) => void; + setQueryParameters: ( + parameters: Record, + ) => void; + reset: () => void; +} + +const initialState: WizardState = { + currentStep: 1, + selectedScanId: null, + selectedQuery: null, + queryParameters: {}, +}; + +const useWizardStore = create((set) => ({ + ...initialState, + setCurrentStep: (step) => set({ currentStep: step }), + 
setSelectedScanId: (scanId) => set({ selectedScanId: scanId }), + setSelectedQuery: (queryId) => set({ selectedQuery: queryId }), + setQueryParameters: (parameters) => set({ queryParameters: parameters }), + reset: () => set(initialState), +})); + +/** + * Custom hook for managing Attack Paths wizard state + * Handles step navigation, scan selection, and query configuration + */ +export const useWizardState = () => { + const router = useRouter(); + + const store = useWizardStore(); + + // Derive current step from URL path + const currentStep: 1 | 2 = + typeof window !== "undefined" + ? window.location.pathname.includes("query-builder") + ? 2 + : 1 + : 1; + + const goToSelectScan = useCallback(() => { + store.setCurrentStep(1); + router.push("/attack-paths/select-scan"); + }, [router, store]); + + const goToQueryBuilder = useCallback( + (scanId: string) => { + store.setSelectedScanId(scanId); + store.setCurrentStep(2); + router.push(`/attack-paths/query-builder?scanId=${scanId}`); + }, + [router, store], + ); + + const updateQueryParameters = useCallback( + (parameters: Record) => { + store.setQueryParameters(parameters); + }, + [store], + ); + + const getScanIdFromUrl = useCallback(() => { + const params = new URLSearchParams( + typeof window !== "undefined" ? window.location.search : "", + ); + return params.get("scanId") || store.selectedScanId; + }, [store.selectedScanId]); + + return { + currentStep, + selectedScanId: store.selectedScanId || getScanIdFromUrl(), + selectedQuery: store.selectedQuery, + queryParameters: store.queryParameters, + goToSelectScan, + goToQueryBuilder, + setSelectedQuery: store.setSelectedQuery, + updateQueryParameters, + reset: store.reset, + }; +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/export.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/export.ts new file mode 100644 index 0000000000..fd04b6a31e --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/export.ts @@ -0,0 +1,145 @@ +/** + * Export utilities for attack path graphs + * Handles exporting graph visualization to various formats + */ + +/** + * Helper function to download a blob as a file + * @param blob The blob to download + * @param filename The name of the file + */ +const downloadBlob = (blob: Blob, filename: string) => { + const url = URL.createObjectURL(blob); + const link = document.createElement("a"); + link.href = url; + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + URL.revokeObjectURL(url); +}; + +/** + * Export graph as SVG image + * @param svgElement The SVG element to export + * @param filename The name of the file to download + */ +export const exportGraphAsSVG = ( + svgElement: SVGSVGElement | null, + filename: string = "attack-path-graph.svg", +) => { + if (!svgElement) return; + + try { + // Clone the SVG element to avoid modifying the original + const clonedSvg = svgElement.cloneNode(true) as SVGSVGElement; + + // Find the main container group (first g element with transform) + const containerGroup = clonedSvg.querySelector("g"); + if (!containerGroup) { + throw new Error("Could not find graph container"); + } + + // Get the bounding box of the actual graph content + // We need to get it from the original SVG since cloned elements don't have computed geometry + const originalContainer = svgElement.querySelector("g"); + if (!originalContainer) { + throw new Error("Could not find original graph container"); + } + + const bbox = 
originalContainer.getBBox(); + + // Add padding around the content + const padding = 50; + const contentWidth = bbox.width + padding * 2; + const contentHeight = bbox.height + padding * 2; + + // Set the SVG dimensions to fit the content + clonedSvg.setAttribute("width", `${contentWidth}`); + clonedSvg.setAttribute("height", `${contentHeight}`); + clonedSvg.setAttribute( + "viewBox", + `${bbox.x - padding} ${bbox.y - padding} ${contentWidth} ${contentHeight}`, + ); + + // Remove the zoom transform from the container - the viewBox now handles positioning + containerGroup.removeAttribute("transform"); + + // Add white background for better visibility + const bgRect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect", + ); + bgRect.setAttribute("x", `${bbox.x - padding}`); + bgRect.setAttribute("y", `${bbox.y - padding}`); + bgRect.setAttribute("width", `${contentWidth}`); + bgRect.setAttribute("height", `${contentHeight}`); + bgRect.setAttribute("fill", "#1c1917"); // Dark background matching the app + clonedSvg.insertBefore(bgRect, clonedSvg.firstChild); + + const svgData = new XMLSerializer().serializeToString(clonedSvg); + const blob = new Blob([svgData], { type: "image/svg+xml" }); + downloadBlob(blob, filename); + } catch (error) { + console.error("Failed to export graph as SVG:", error); + throw new Error("Failed to export graph"); + } +}; + +/** + * Export graph as PNG image + * @param svgElement The SVG element to export + * @param filename The name of the file to download + */ +export const exportGraphAsPNG = async ( + svgElement: SVGSVGElement | null, + filename: string = "attack-path-graph.png", +) => { + if (!svgElement) return; + + try { + const svgData = new XMLSerializer().serializeToString(svgElement); + const canvas = document.createElement("canvas"); + const ctx = canvas.getContext("2d") as CanvasRenderingContext2D; + + if (!ctx) throw new Error("Could not get canvas context"); + + const svg = new Image(); + svg.onload = () => { + canvas.width = svg.width; + canvas.height = svg.height; + ctx.drawImage(svg, 0, 0); + canvas.toBlob((blob) => { + if (blob) { + downloadBlob(blob, filename); + } + }); + }; + svg.onerror = () => { + throw new Error("Failed to load SVG for PNG conversion"); + }; + svg.src = `data:image/svg+xml;base64,${btoa(svgData)}`; + } catch (error) { + console.error("Failed to export graph as PNG:", error); + throw new Error("Failed to export graph"); + } +}; + +/** + * Export graph data as JSON + * @param graphData The graph data to export + * @param filename The name of the file to download + */ +export const exportGraphAsJSON = ( + graphData: Record, + filename: string = "attack-path-graph.json", +) => { + try { + const jsonString = JSON.stringify(graphData, null, 2); + const blob = new Blob([jsonString], { type: "application/json" }); + downloadBlob(blob, filename); + } catch (error) { + console.error("Failed to export graph as JSON:", error); + throw new Error("Failed to export graph"); + } +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/format.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/format.ts new file mode 100644 index 0000000000..02871e270e --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/format.ts @@ -0,0 +1,25 @@ +/** + * Formatting utilities for attack path graph nodes + */ + +/** + * Format camelCase labels to space-separated text + * e.g., "ProwlerFinding" -> "Prowler Finding", "AWSAccount" -> "Aws Account" + */ +export function 
formatNodeLabel(label: string): string { + return label + .replace(/([A-Z]+)([A-Z][a-z])/g, "$1 $2") + .replace(/([a-z\d])([A-Z])/g, "$1 $2") + .trim() + .split(" ") + .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()) + .join(" "); +} + +/** + * Format multiple node labels into a readable string + * e.g., ["ProwlerFinding"] -> "Prowler Finding" + */ +export function formatNodeLabels(labels: string[]): string { + return labels.map(formatNodeLabel).join(", "); +} diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-colors.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-colors.ts new file mode 100644 index 0000000000..eb207535ad --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-colors.ts @@ -0,0 +1,139 @@ +/** + * Color constants for attack path graph visualization + * Colors chosen to work well in both light and dark themes + */ + +/** + * Node fill colors - darker versions of design system severity colors + * Darkened to ensure white text has proper contrast (WCAG AA) + */ +export const GRAPH_NODE_COLORS = { + // Finding severities - darkened versions for white text readability + critical: "#cc0055", // Darker pink (from #ff006a) + high: "#c45a3a", // Darker coral (from #f77852) + medium: "#b8860b", // Dark goldenrod (from #fec94d) + low: "#8b9a3e", // Olive/dark yellow-green (from #fdfbd4) + info: "#2563eb", // Darker blue (from #3c8dff) + // Node types + prowlerFinding: "#ea580c", + awsAccount: "#f59e0b", // Amber 500 - AWS orange + attackPattern: "#16a34a", + summary: "#16a34a", + // Infrastructure + ec2Instance: "#0891b2", // Cyan 600 + s3Bucket: "#0284c7", // Sky 600 + iamRole: "#7c3aed", // Violet 600 + iamPolicy: "#7c3aed", + lambdaFunction: "#d97706", // Amber 600 + securityGroup: "#0891b2", + default: "#0891b2", +} as const; + +/** + * Node border colors - using original design system colors as borders (lighter than fill) + */ +export const GRAPH_NODE_BORDER_COLORS = { + critical: "#ff006a", // Original --bg-data-critical + high: "#f77852", // Original --bg-data-high + medium: "#fec94d", // Original --bg-data-medium + low: "#c4d4a0", // Lighter olive + info: "#3c8dff", // Original --bg-data-info + prowlerFinding: "#fb923c", + awsAccount: "#fbbf24", // Amber 400 + attackPattern: "#4ade80", + summary: "#4ade80", + ec2Instance: "#22d3ee", // Cyan 400 + s3Bucket: "#38bdf8", // Sky 400 + iamRole: "#a78bfa", // Violet 400 + iamPolicy: "#a78bfa", + lambdaFunction: "#fbbf24", + securityGroup: "#22d3ee", + default: "#22d3ee", +} as const; + +export const GRAPH_EDGE_COLOR = "#ffffff"; // White (default) +export const GRAPH_EDGE_HIGHLIGHT_COLOR = "#f97316"; // Orange 500 (on hover) +export const GRAPH_EDGE_GLOW_COLOR = "#fb923c"; +export const GRAPH_SELECTION_COLOR = "#ffffff"; +export const GRAPH_BORDER_COLOR = "#374151"; +export const GRAPH_ALERT_BORDER_COLOR = "#ef4444"; // Red 500 - for resources with findings + +/** + * Get node fill color based on labels and properties + */ +export const getNodeColor = ( + labels: string[], + properties?: Record, +): string => { + const isFinding = labels.some((l) => l.toLowerCase().includes("finding")); + if (isFinding && properties?.severity) { + const severity = String(properties.severity).toLowerCase(); + if (severity === "critical") return GRAPH_NODE_COLORS.critical; + if (severity === "high") return GRAPH_NODE_COLORS.high; + if (severity === "medium") return GRAPH_NODE_COLORS.medium; + if (severity === "low") return GRAPH_NODE_COLORS.low; 
+ if (severity === "informational" || severity === "info") + return GRAPH_NODE_COLORS.info; + return GRAPH_NODE_COLORS.prowlerFinding; + } + + if (labels.some((l) => l.toLowerCase().includes("attackpattern"))) + return GRAPH_NODE_COLORS.attackPattern; + if (labels.includes("AWSAccount")) return GRAPH_NODE_COLORS.awsAccount; + if (labels.includes("EC2Instance")) return GRAPH_NODE_COLORS.ec2Instance; + if (labels.includes("S3Bucket")) return GRAPH_NODE_COLORS.s3Bucket; + if (labels.includes("IAMRole")) return GRAPH_NODE_COLORS.iamRole; + if (labels.includes("IAMPolicy")) return GRAPH_NODE_COLORS.iamPolicy; + if (labels.includes("LambdaFunction")) + return GRAPH_NODE_COLORS.lambdaFunction; + if (labels.includes("SecurityGroup")) return GRAPH_NODE_COLORS.securityGroup; + + return GRAPH_NODE_COLORS.default; +}; + +/** + * Get node border color based on labels and properties + */ +export const getNodeBorderColor = ( + labels: string[], + properties?: Record, +): string => { + const isFinding = labels.some((l) => l.toLowerCase().includes("finding")); + if (isFinding && properties?.severity) { + const severity = String(properties.severity).toLowerCase(); + if (severity === "critical") return GRAPH_NODE_BORDER_COLORS.critical; + if (severity === "high") return GRAPH_NODE_BORDER_COLORS.high; + if (severity === "medium") return GRAPH_NODE_BORDER_COLORS.medium; + if (severity === "low") return GRAPH_NODE_BORDER_COLORS.low; + if (severity === "informational" || severity === "info") + return GRAPH_NODE_BORDER_COLORS.info; + return GRAPH_NODE_BORDER_COLORS.prowlerFinding; + } + + if (labels.some((l) => l.toLowerCase().includes("attackpattern"))) + return GRAPH_NODE_BORDER_COLORS.attackPattern; + if (labels.includes("AWSAccount")) return GRAPH_NODE_BORDER_COLORS.awsAccount; + if (labels.includes("EC2Instance")) + return GRAPH_NODE_BORDER_COLORS.ec2Instance; + if (labels.includes("S3Bucket")) return GRAPH_NODE_BORDER_COLORS.s3Bucket; + if (labels.includes("IAMRole")) return GRAPH_NODE_BORDER_COLORS.iamRole; + if (labels.includes("IAMPolicy")) return GRAPH_NODE_BORDER_COLORS.iamPolicy; + if (labels.includes("LambdaFunction")) + return GRAPH_NODE_BORDER_COLORS.lambdaFunction; + if (labels.includes("SecurityGroup")) + return GRAPH_NODE_BORDER_COLORS.securityGroup; + + return GRAPH_NODE_BORDER_COLORS.default; +}; + +/** + * Check if a background color is light (for determining text color) + */ +export const isLightBackground = (backgroundColor: string): boolean => { + const hex = backgroundColor.replace("#", ""); + const r = parseInt(hex.substring(0, 2), 16); + const g = parseInt(hex.substring(2, 4), 16); + const b = parseInt(hex.substring(4, 6), 16); + const luminance = (0.299 * r + 0.587 * g + 0.114 * b) / 255; + return luminance > 0.5; +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-utils.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-utils.ts new file mode 100644 index 0000000000..ac5c219bfb --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/graph-utils.ts @@ -0,0 +1,187 @@ +/** + * Utility functions for attack path graph operations + */ + +import type { AttackPathGraphData } from "@/types/attack-paths"; + +/** + * Type for edge node reference - can be a string ID or an object with id property + * Note: We use `object` to match GraphEdge type from attack-paths.ts + */ +export type EdgeNodeRef = string | object; + +/** + * Helper to get edge source/target ID from string or object + */ +export const getEdgeNodeId = 
(nodeRef: EdgeNodeRef): string => { + if (typeof nodeRef === "string") { + return nodeRef; + } + // Edge node references are objects with an id property + return (nodeRef as { id: string }).id; +}; + +/** + * Compute a filtered subgraph containing only the path through the target node. + * This follows the directed graph structure of attack paths: + * - Upstream: traces back to the root (AWS Account) + * - Downstream: traces forward to leaf nodes + * - Also includes findings connected to the selected node + */ +export const computeFilteredSubgraph = ( + fullData: AttackPathGraphData, + targetNodeId: string, +): AttackPathGraphData => { + const nodes = fullData.nodes; + const edges = fullData.edges || []; + + // Build directed adjacency lists + const forwardEdges = new Map>(); // source -> targets + const backwardEdges = new Map>(); // target -> sources + nodes.forEach((node) => { + forwardEdges.set(node.id, new Set()); + backwardEdges.set(node.id, new Set()); + }); + + edges.forEach((edge) => { + const sourceId = getEdgeNodeId(edge.source); + const targetId = getEdgeNodeId(edge.target); + forwardEdges.get(sourceId)?.add(targetId); + backwardEdges.get(targetId)?.add(sourceId); + }); + + const visibleNodeIds = new Set(); + visibleNodeIds.add(targetNodeId); + + // Traverse upstream (backward) - find all ancestors + const traverseUpstream = (nodeId: string) => { + const sources = backwardEdges.get(nodeId); + if (sources) { + sources.forEach((sourceId) => { + if (!visibleNodeIds.has(sourceId)) { + visibleNodeIds.add(sourceId); + traverseUpstream(sourceId); + } + }); + } + }; + + // Traverse downstream (forward) - find all descendants + const traverseDownstream = (nodeId: string) => { + const targets = forwardEdges.get(nodeId); + if (targets) { + targets.forEach((targetId) => { + if (!visibleNodeIds.has(targetId)) { + visibleNodeIds.add(targetId); + traverseDownstream(targetId); + } + }); + } + }; + + // Start traversal from the target node + traverseUpstream(targetNodeId); + traverseDownstream(targetNodeId); + + // Also include findings directly connected to the selected node + edges.forEach((edge) => { + const sourceId = getEdgeNodeId(edge.source); + const targetId = getEdgeNodeId(edge.target); + const sourceNode = nodes.find((n) => n.id === sourceId); + const targetNode = nodes.find((n) => n.id === targetId); + + const sourceIsFinding = sourceNode?.labels.some((l) => + l.toLowerCase().includes("finding"), + ); + const targetIsFinding = targetNode?.labels.some((l) => + l.toLowerCase().includes("finding"), + ); + + // Include findings connected to the selected node + if (sourceId === targetNodeId && targetIsFinding) { + visibleNodeIds.add(targetId); + } + if (targetId === targetNodeId && sourceIsFinding) { + visibleNodeIds.add(sourceId); + } + }); + + // Filter nodes and edges to only include visible ones + const filteredNodes = nodes.filter((node) => visibleNodeIds.has(node.id)); + const filteredEdges = edges.filter((edge) => { + const sourceId = getEdgeNodeId(edge.source); + const targetId = getEdgeNodeId(edge.target); + return visibleNodeIds.has(sourceId) && visibleNodeIds.has(targetId); + }); + + return { + nodes: filteredNodes, + edges: filteredEdges, + }; +}; + +/** + * Find edges in the path from a given node. + * Upstream: follows only ONE parent path (first parent at each level) to avoid lighting up siblings + * Downstream: follows ALL children recursively + * + * Uses pre-built adjacency maps for O(1) lookups instead of O(n) array searches per traversal step. 
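+ *
+ * @example
+ * // A minimal sketch: "B" has one parent and two children.
+ * getPathEdges("B", [
+ *   { sourceId: "A", targetId: "B" },
+ *   { sourceId: "B", targetId: "C" },
+ *   { sourceId: "B", targetId: "D" },
+ * ]);
+ * // => Set { "A-B", "B-C", "B-D" }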
+ * + * @param nodeId - The starting node ID + * @param edges - Array of edges with sourceId and targetId + * @returns Set of edge IDs in the format "sourceId-targetId" + */ +export const getPathEdges = ( + nodeId: string, + edges: Array<{ sourceId: string; targetId: string }>, +): Set => { + // Build adjacency maps once - O(n) + const parentMap = new Map(); + const childrenMap = new Map< + string, + Array<{ sourceId: string; targetId: string }> + >(); + + edges.forEach((edge) => { + // First parent only (matches original behavior of find()) + if (!parentMap.has(edge.targetId)) { + parentMap.set(edge.targetId, edge); + } + const children = childrenMap.get(edge.sourceId) || []; + children.push(edge); + childrenMap.set(edge.sourceId, children); + }); + + const pathEdgeIds = new Set(); + const visitedNodes = new Set(); + + // Traverse upstream - only follow ONE parent at each level (first found) + // This creates a single path to the root, not all paths + const traverseUpstream = (currentNodeId: string) => { + if (visitedNodes.has(`up-${currentNodeId}`)) return; + visitedNodes.add(`up-${currentNodeId}`); + + const parentEdge = parentMap.get(currentNodeId); // O(1) lookup + if (parentEdge) { + pathEdgeIds.add(`${parentEdge.sourceId}-${parentEdge.targetId}`); + traverseUpstream(parentEdge.sourceId); + } + }; + + // Traverse downstream (find ALL targets from this node) + const traverseDownstream = (currentNodeId: string) => { + if (visitedNodes.has(`down-${currentNodeId}`)) return; + visitedNodes.add(`down-${currentNodeId}`); + + const children = childrenMap.get(currentNodeId) || []; // O(1) lookup + children.forEach((edge) => { + pathEdgeIds.add(`${edge.sourceId}-${edge.targetId}`); + traverseDownstream(edge.targetId); + }); + }; + + traverseUpstream(nodeId); + traverseDownstream(nodeId); + + return pathEdgeIds; +}; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/index.ts b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/index.ts new file mode 100644 index 0000000000..bd7581751f --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/_lib/index.ts @@ -0,0 +1,22 @@ +export { + exportGraphAsJSON, + exportGraphAsPNG, + exportGraphAsSVG, +} from "./export"; +export { formatNodeLabel, formatNodeLabels } from "./format"; +export { + getNodeBorderColor, + getNodeColor, + GRAPH_ALERT_BORDER_COLOR, + GRAPH_EDGE_COLOR, + GRAPH_EDGE_HIGHLIGHT_COLOR, + GRAPH_NODE_BORDER_COLORS, + GRAPH_NODE_COLORS, + GRAPH_SELECTION_COLOR, +} from "./graph-colors"; +export { + computeFilteredSubgraph, + type EdgeNodeRef, + getEdgeNodeId, + getPathEdges, +} from "./graph-utils"; diff --git a/ui/app/(prowler)/attack-paths/(workflow)/query-builder/page.tsx b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/page.tsx new file mode 100644 index 0000000000..a817b9a715 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/(workflow)/query-builder/page.tsx @@ -0,0 +1,626 @@ +"use client"; + +import { ArrowLeft, Maximize2, X } from "lucide-react"; +import { useSearchParams } from "next/navigation"; +import { Suspense, useCallback, useEffect, useRef, useState } from "react"; +import { FormProvider } from "react-hook-form"; + +import { + executeQuery, + getAttackPathScans, + getAvailableQueries, +} from "@/actions/attack-paths"; +import { adaptQueryResultToGraphData } from "@/actions/attack-paths/query-result.adapter"; +import { AutoRefresh } from "@/components/scans"; +import { Button, Card, CardContent } from "@/components/shadcn"; +import { + Dialog, + DialogContent, + 
+  DialogHeader,
+  DialogTitle,
+  DialogTrigger,
+  useToast,
+} from "@/components/ui";
+import type {
+  AttackPathQuery,
+  AttackPathScan,
+  GraphNode,
+} from "@/types/attack-paths";
+
+import {
+  AttackPathGraph,
+  ExecuteButton,
+  GraphControls,
+  GraphLegend,
+  GraphLoading,
+  NodeDetailContent,
+  QueryParametersForm,
+  QuerySelector,
+  ScanListTable,
+} from "./_components";
+import type { AttackPathGraphRef } from "./_components/graph/attack-path-graph";
+import { useGraphState } from "./_hooks/use-graph-state";
+import { useQueryBuilder } from "./_hooks/use-query-builder";
+import { exportGraphAsSVG, formatNodeLabel } from "./_lib";
+
+/**
+ * Attack Paths Analysis
+ * Allows users to select a scan, build a query, and visualize the Attack Paths graph
+ */
+export default function AttackPathAnalysisPage() {
+  const searchParams = useSearchParams();
+  const scanId = searchParams.get("scanId");
+  const graphState = useGraphState();
+  const { toast } = useToast();
+
+  const [scansLoading, setScansLoading] = useState(true);
+  const [scans, setScans] = useState<AttackPathScan[]>([]);
+  const [queriesLoading, setQueriesLoading] = useState(true);
+  const [queriesError, setQueriesError] = useState<string | null>(null);
+  const [isFullscreenOpen, setIsFullscreenOpen] = useState(false);
+  const graphRef = useRef<AttackPathGraphRef>(null);
+  const fullscreenGraphRef = useRef<AttackPathGraphRef>(null);
+  const hasResetRef = useRef(false);
+  const nodeDetailsRef = useRef<HTMLDivElement>(null);
+  const graphContainerRef = useRef<HTMLDivElement>(null);
+
+  const [queries, setQueries] = useState<AttackPathQuery[]>([]);
+
+  // Use custom hook for query builder form state and validation
+  const queryBuilder = useQueryBuilder(queries);
+
+  // Reset graph state when component mounts
+  useEffect(() => {
+    if (!hasResetRef.current) {
+      hasResetRef.current = true;
+      graphState.resetGraph();
+    }
+  }, [graphState]);
+
+  // Load available scans on mount
+  useEffect(() => {
+    const loadScans = async () => {
+      setScansLoading(true);
+      try {
+        const scansData = await getAttackPathScans();
+        if (scansData?.data) {
+          setScans(scansData.data);
+        } else {
+          setScans([]);
+        }
+      } catch (error) {
+        console.error("Failed to load scans:", error);
+        setScans([]);
+      } finally {
+        setScansLoading(false);
+      }
+    };
+
+    loadScans();
+  }, []);
+
+  // Check if there's an executing scan for auto-refresh
+  const hasExecutingScan = scans.some(
+    (scan) =>
+      scan.attributes.state === "executing" ||
+      scan.attributes.state === "scheduled",
+  );
+
+  // Callback to refresh scans (used by AutoRefresh component)
+  const refreshScans = useCallback(async () => {
+    try {
+      const scansData = await getAttackPathScans();
+      if (scansData?.data) {
+        setScans(scansData.data);
+      }
+    } catch (error) {
+      console.error("Failed to refresh scans:", error);
+    }
+  }, []);
+
+  // Load available queries on mount
+  useEffect(() => {
+    const loadQueries = async () => {
+      if (!scanId) {
+        setQueriesError("No scan selected");
+        setQueriesLoading(false);
+        return;
+      }
+
+      setQueriesLoading(true);
+      try {
+        const queriesData = await getAvailableQueries(scanId);
+        if (queriesData?.data) {
+          setQueries(queriesData.data);
+          setQueriesError(null);
+        } else {
+          setQueriesError("Failed to load available queries");
+          toast({
+            title: "Error",
+            description: "Failed to load queries for this scan",
+            variant: "destructive",
+          });
+        }
+      } catch (error) {
+        const errorMsg =
+          error instanceof Error ? error.message : "Unknown error";
+        setQueriesError(errorMsg);
+        toast({
+          title: "Error",
+          description: "Failed to load queries",
+          variant: "destructive",
+        });
+      } finally {
+        setQueriesLoading(false);
+      }
+    };
+
+    loadQueries();
+  }, [scanId, toast]);
+
+  const handleQueryChange = (queryId: string) => {
+    queryBuilder.handleQueryChange(queryId);
+  };
+
+  const showErrorToast = (title: string, description: string) => {
+    toast({
+      title,
+      description,
+      variant: "destructive",
+    });
+  };
+
+  const handleExecuteQuery = async () => {
+    if (!scanId || !queryBuilder.selectedQuery) {
+      showErrorToast("Error", "Please select both a scan and a query");
+      return;
+    }
+
+    // Validate form before executing query
+    const isValid = await queryBuilder.form.trigger();
+    if (!isValid) {
+      showErrorToast(
+        "Validation Error",
+        "Please fill in all required parameters",
+      );
+      return;
+    }
+
+    graphState.startLoading();
+    graphState.setError(null);
+
+    try {
+      const parameters = queryBuilder.getQueryParameters() as Record<
+        string,
+        string | number | boolean
+      >;
+      const result = await executeQuery(
+        scanId,
+        queryBuilder.selectedQuery,
+        parameters,
+      );
+
+      if (result?.data?.attributes) {
+        const graphData = adaptQueryResultToGraphData(result.data.attributes);
+        graphState.updateGraphData(graphData);
+        toast({
+          title: "Success",
+          description: "Query executed successfully",
+          variant: "default",
+        });
+
+        // Scroll to graph after successful query execution
+        setTimeout(() => {
+          graphContainerRef.current?.scrollIntoView({
+            behavior: "smooth",
+            block: "start",
+          });
+        }, 100);
+      } else {
+        graphState.resetGraph();
+        graphState.setError("No data returned from query");
+        showErrorToast("Error", "Query returned no data");
+      }
+    } catch (error) {
+      const errorMsg =
+        error instanceof Error ? error.message : "Failed to execute query";
+      graphState.resetGraph();
+      graphState.setError(errorMsg);
+      showErrorToast("Error", errorMsg);
+    } finally {
+      graphState.stopLoading();
+    }
+  };
+
+  const handleNodeClick = (node: GraphNode) => {
+    // Enter filtered view showing only paths containing this node
+    graphState.enterFilteredView(node.id);
+
+    // For findings, also scroll to the details section
+    const isFinding = node.labels.some((label) =>
+      label.toLowerCase().includes("finding"),
+    );
+
+    if (isFinding) {
+      setTimeout(() => {
+        nodeDetailsRef.current?.scrollIntoView({
+          behavior: "smooth",
+          block: "nearest",
+        });
+      }, 100);
+    }
+  };
+
+  const handleBackToFullView = () => {
+    graphState.exitFilteredView();
+  };
+
+  const handleCloseDetails = () => {
+    graphState.selectNode(null);
+  };
+
+  const handleGraphExport = (svgElement: SVGSVGElement | null) => {
+    try {
+      if (svgElement) {
+        exportGraphAsSVG(svgElement, "attack-path-graph.svg");
+        toast({
+          title: "Success",
+          description: "Graph exported as SVG",
+          variant: "default",
+        });
+      } else {
+        throw new Error("Could not find graph element");
+      }
+    } catch (error) {
+      toast({
+        title: "Error",
+        description:
+          error instanceof Error ? error.message : "Failed to export graph",
+        variant: "destructive",
+      });
+    }
+  };
+
+  return (
+      {/* Auto-refresh scans when there's an executing scan */}
+      <AutoRefresh
+        hasExecutingScan={hasExecutingScan}
+        onRefresh={refreshScans}
+      />
+
+      {/* Header */}
+

+ Attack Paths Analysis +

+

+ Select a scan, build a query, and visualize Attack Paths in your + infrastructure. +

+
+ + {/* Top Section - Scans Table and Query Builder (2 columns) */} +
+ {/* Scans Table Section - Left Column */} +
+ {scansLoading ? ( +
+

Loading scans...

+
+ ) : scans.length === 0 ? ( +
+

No scans available

+
+ ) : ( + Loading scans...
}> + + + )} +
+ + {/* Query Builder Section - Right Column */} +
+ {!scanId ? ( +

+ Select a scan from the table on the left to begin. +

+ ) : queriesLoading ? ( +

Loading queries...

+ ) : queriesError ? ( +

+ {queriesError} +

+ ) : ( + <> + + + + {queryBuilder.selectedQuery && ( + + )} + + +
+ +
+ + {graphState.error && ( +
+ {graphState.error} +
+ )} + + )} +
+
+ + {/* Bottom Section - Graph Visualization (Full Width) */} +
+        {graphState.loading ? (
+          <GraphLoading />
+        ) : graphState.data &&
+          graphState.data.nodes &&
+          graphState.data.nodes.length > 0 ? (
+          <>
+            {/* Info message and controls */}
+ {graphState.isFilteredView ? ( +
+ +
+ + + Showing paths for:{" "} + + {graphState.filteredNode?.properties?.name || + graphState.filteredNode?.properties?.id || + "Selected node"} + + +
+
+ ) : ( +
+ + + Click on any node to filter and view its connected paths + +
+ )} + + {/* Graph controls and fullscreen button together */} +
+            <GraphControls
+              onZoomIn={() => graphRef.current?.zoomIn()}
+              onZoomOut={() => graphRef.current?.zoomOut()}
+              onFitToScreen={() => graphRef.current?.resetZoom()}
+              onExport={() =>
+                handleGraphExport(graphRef.current?.getSVGElement() || null)
+              }
+            />
+
+            {/* Fullscreen button */}
+ + + + + + + + Graph Fullscreen View + + +
+                    <GraphControls
+                      onZoomIn={() => fullscreenGraphRef.current?.zoomIn()}
+                      onZoomOut={() =>
+                        fullscreenGraphRef.current?.zoomOut()
+                      }
+                      onFitToScreen={() =>
+                        fullscreenGraphRef.current?.resetZoom()
+                      }
+                      onExport={() =>
+                        handleGraphExport(
+                          fullscreenGraphRef.current?.getSVGElement() || null,
+                        )
+                      }
+                    />
+
+
+ +
+ {/* Node Detail Panel - Side by side */} + {graphState.selectedNode && ( +
+ + +
+

+ Node Details +

+ +
+

+ {graphState.selectedNode?.labels.some( + (label) => + label.toLowerCase().includes("finding"), + ) + ? graphState.selectedNode?.properties + ?.check_title || + graphState.selectedNode?.properties?.id || + "Unknown Finding" + : graphState.selectedNode?.properties + ?.name || + graphState.selectedNode?.properties?.id || + "Unknown Resource"} +

+
+
+

+ Type +

+

+ {graphState.selectedNode?.labels + .map(formatNodeLabel) + .join(", ")} +

+
+
+
+
+
+ )} +
+
+
+
+
+
+ + {/* Graph in the middle */} +
+ +
+ + {/* Legend below */} +
+ +
+ + ) : ( +
+

+ Select a query and click "Execute Query" to visualize + the Attack Paths graph +

+
+ )} +
+ + {/* Node Detail Panel - Below Graph */} + {graphState.selectedNode && graphState.data && ( +
+
+
+

Node Details

+

+ {String( + graphState.selectedNode.labels.some((label) => + label.toLowerCase().includes("finding"), + ) + ? graphState.selectedNode.properties?.check_title || + graphState.selectedNode.properties?.id || + "Unknown Finding" + : graphState.selectedNode.properties?.name || + graphState.selectedNode.properties?.id || + "Unknown Resource", + )} +

+
+
+ {graphState.selectedNode.labels.some((label) => + label.toLowerCase().includes("finding"), + ) && ( + + )} + +
+
+ + +
+ )} + + ); +} diff --git a/ui/app/(prowler)/attack-paths/page.tsx b/ui/app/(prowler)/attack-paths/page.tsx new file mode 100644 index 0000000000..3fe92b08f8 --- /dev/null +++ b/ui/app/(prowler)/attack-paths/page.tsx @@ -0,0 +1,9 @@ +import { redirect } from "next/navigation"; + +/** + * Landing page for Attack Paths feature + * Redirects to the integrated attack path analysis view + */ +export default function AttackPathsPage() { + redirect("/attack-paths/query-builder"); +} diff --git a/ui/app/(prowler)/findings/page.tsx b/ui/app/(prowler)/findings/page.tsx index fa99207b6b..5ba469b745 100644 --- a/ui/app/(prowler)/findings/page.tsx +++ b/ui/app/(prowler)/findings/page.tsx @@ -1,6 +1,7 @@ import { Suspense } from "react"; import { + getFindingById, getFindings, getLatestFindings, getLatestMetadataInfo, @@ -8,6 +9,7 @@ import { } from "@/actions/findings"; import { getProviders } from "@/actions/providers"; import { getScans } from "@/actions/scans"; +import { FindingDetailsSheet } from "@/components/findings"; import { FindingsFilters } from "@/components/findings/findings-filters"; import { FindingsTableWithSelection, @@ -41,15 +43,79 @@ export default async function Findings({ // Check if the searchParams contain any date or scan filter const hasDateOrScan = hasDateOrScanFilter(resolvedSearchParams); - const [metadataInfoData, providersData, scansData] = await Promise.all([ - (hasDateOrScan ? getMetadataInfo : getLatestMetadataInfo)({ - query, - sort: encodedSort, - filters, - }), - getProviders({ pageSize: 50 }), - getScans({ pageSize: 50 }), - ]); + // Check if there's a specific finding ID to fetch + const findingId = resolvedSearchParams.id?.toString(); + + const [metadataInfoData, providersData, scansData, findingByIdData] = + await Promise.all([ + (hasDateOrScan ? getMetadataInfo : getLatestMetadataInfo)({ + query, + sort: encodedSort, + filters, + }), + getProviders({ pageSize: 50 }), + getScans({ pageSize: 50 }), + findingId + ? getFindingById(findingId, "resources,scan.provider") + : Promise.resolve(null), + ]); + + // Process the finding data to match the expected structure + const processedFinding = findingByIdData?.data + ? (() => { + const finding = findingByIdData.data; + const included = findingByIdData.included || []; + + // Build dictionaries from included data + type IncludedItem = { + type: string; + id: string; + attributes: Record; + relationships?: { + provider?: { data?: { id: string } }; + }; + }; + + const resourceDict: Record = {}; + const scanDict: Record = {}; + const providerDict: Record = {}; + + included.forEach((item: IncludedItem) => { + if (item.type === "resources") { + resourceDict[item.id] = { + id: item.id, + attributes: item.attributes, + }; + } else if (item.type === "scans") { + scanDict[item.id] = item; + } else if (item.type === "providers") { + providerDict[item.id] = { + id: item.id, + attributes: item.attributes, + }; + } + }); + + const scanId = finding.relationships?.scan?.data?.id; + const resourceId = finding.relationships?.resources?.data?.[0]?.id; + const scan = scanId ? scanDict[scanId] : undefined; + const providerId = scan?.relationships?.provider?.data?.id; + + const resource = resourceId ? resourceDict[resourceId] : undefined; + const provider = providerId ? providerDict[providerId] : undefined; + + return { + ...finding, + relationships: { + scan: scan + ? 
+              ? { data: scan, attributes: scan.attributes }
+              : undefined,
+            resource: resource,
+            provider: provider,
+          },
+        } as FindingProps;
+      })()
+    : null;
 
   // Extract unique regions, services, categories from the new endpoint
   const uniqueRegions = metadataInfoData?.data?.attributes?.regions || [];
@@ -98,6 +164,7 @@ export default async function Findings({
         }>
+      {processedFinding && <FindingDetailsSheet finding={processedFinding} />}
     );
 }
diff --git a/ui/components/findings/finding-details-sheet.tsx b/ui/components/findings/finding-details-sheet.tsx
new file mode 100644
index 0000000000..ddea0e13c4
--- /dev/null
+++ b/ui/components/findings/finding-details-sheet.tsx
@@ -0,0 +1,46 @@
+"use client";
+
+import { usePathname, useRouter, useSearchParams } from "next/navigation";
+
+import {
+  Sheet,
+  SheetContent,
+  SheetDescription,
+  SheetHeader,
+  SheetTitle,
+} from "@/components/ui/sheet";
+import { FindingProps } from "@/types/components";
+
+import { FindingDetail } from "./table/finding-detail";
+
+interface FindingDetailsSheetProps {
+  finding: FindingProps;
+}
+
+export const FindingDetailsSheet = ({ finding }: FindingDetailsSheetProps) => {
+  const router = useRouter();
+  const pathname = usePathname();
+  const searchParams = useSearchParams();
+
+  const handleOpenChange = (open: boolean) => {
+    if (!open) {
+      const params = new URLSearchParams(searchParams.toString());
+      params.delete("id");
+      router.push(`${pathname}?${params.toString()}`, { scroll: false });
+    }
+  };
+
+  return (
+    
+      
+        
+          Finding Details
+          
+            View the finding details
+          
+        
+        
+      
+    
+  );
+};
diff --git a/ui/components/findings/index.ts b/ui/components/findings/index.ts
index 2e2bece9f7..a43ec5f622 100644
--- a/ui/components/findings/index.ts
+++ b/ui/components/findings/index.ts
@@ -1 +1,2 @@
+export * from "./finding-details-sheet";
 export * from "./muted";
diff --git a/ui/components/findings/table/column-findings.tsx b/ui/components/findings/table/column-findings.tsx
index 8f5470ac57..761d5f4e0e 100644
--- a/ui/components/findings/table/column-findings.tsx
+++ b/ui/components/findings/table/column-findings.tsx
@@ -4,10 +4,8 @@ import { ColumnDef, RowSelectionState } from "@tanstack/react-table";
 import { Database } from "lucide-react";
 import { useSearchParams } from "next/navigation";
 
-import {
-  DataTableRowActions,
-  FindingDetail,
-} from "@/components/findings/table";
+import { FindingDetail } from "@/components/findings/table";
+import { DataTableRowActions } from "@/components/findings/table";
 import { Checkbox } from "@/components/shadcn";
 import { DateWithTime, SnippetChip } from "@/components/ui/entities";
 import {
@@ -57,23 +55,10 @@ const FindingTitleCell = ({ row }: { row: { original: FindingProps } }) => {
   const isOpen = findingId === row.original.id;
   const { checktitle } = row.original.attributes.check_metadata;
 
-  const handleOpenChange = (open: boolean) => {
-    const params = new URLSearchParams(searchParams);
-
-    if (open) {
-      params.set("id", row.original.id);
-    } else {
-      params.delete("id");
-    }
-
-    window.history.pushState({}, "", `?${params.toString()}`);
-  };
-
   return (

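Note: with the local handleOpenChange removed above, the details sheet is driven entirely by the `id` search param — the server page fetches the finding via getFindingById and mounts FindingDetailsSheet, and closing the sheet deletes the param. A minimal sketch of the corresponding open path (hypothetical hook, not part of this patch; it mirrors the removed handler but goes through the router so the server component re-renders):

import { useRouter, useSearchParams } from "next/navigation";

// Hypothetical helper: writing the id param into the URL triggers a server
// render that fetches the finding and mounts FindingDetailsSheet.
const useOpenFindingDetails = () => {
  const router = useRouter();
  const searchParams = useSearchParams();

  return (findingId: string) => {
    const params = new URLSearchParams(searchParams.toString());
    params.set("id", findingId);
    router.push(`?${params.toString()}`, { scroll: false });
  };
};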
diff --git a/ui/components/findings/table/finding-detail.tsx b/ui/components/findings/table/finding-detail.tsx index 48f1e9560b..439188a20c 100644 --- a/ui/components/findings/table/finding-detail.tsx +++ b/ui/components/findings/table/finding-detail.tsx @@ -1,6 +1,5 @@ "use client"; -import { Snippet } from "@heroui/snippet"; import { ExternalLink, Link, X } from "lucide-react"; import { usePathname, useSearchParams } from "next/navigation"; import type { ReactNode } from "react"; @@ -33,6 +32,7 @@ import { StatusFindingBadge, } from "@/components/ui/table/status-finding-badge"; import { buildGitFileUrl, extractLineRangeFromUid } from "@/lib/iac-utils"; +import { cn } from "@/lib/utils"; import { FindingProps, ProviderType } from "@/types"; import { Muted } from "../muted"; @@ -196,16 +196,16 @@ export const FindingDetail = ({ {attributes.status === "FAIL" && ( - {attributes.check_metadata.risk} - + )} @@ -255,11 +255,13 @@ export const FindingDetail = ({ {/* CLI Command section */} {attributes.check_metadata.remediation.code.cli && ( - +

{attributes.check_metadata.remediation.code.cli} - +
+  )}
diff --git a/ui/components/scans/auto-refresh.tsx b/ui/components/scans/auto-refresh.tsx
index fa2c93424f..3a61bbba8e 100644
--- a/ui/components/scans/auto-refresh.tsx
+++ b/ui/components/scans/auto-refresh.tsx
@@ -5,9 +5,11 @@ import { useEffect } from "react";
 
 interface AutoRefreshProps {
   hasExecutingScan: boolean;
+  /** Optional callback for client-side refresh (used when data is managed in local state) */
+  onRefresh?: () => void | Promise<void>;
 }
 
-export function AutoRefresh({ hasExecutingScan }: AutoRefreshProps) {
+export function AutoRefresh({ hasExecutingScan, onRefresh }: AutoRefreshProps) {
   const router = useRouter();
   const searchParams = useSearchParams();
 
@@ -19,11 +21,17 @@ export function AutoRefresh({ hasExecutingScan, onRefresh }: AutoRefreshProps) {
     if (scanId) return;
 
     const interval = setInterval(() => {
-      router.refresh();
+      if (onRefresh) {
+        // Use custom refresh callback for client-side state management
+        onRefresh();
+      } else {
+        // Default: trigger server-side refresh
+        router.refresh();
+      }
     }, 5000);
 
     return () => clearInterval(interval);
-  }, [hasExecutingScan, router, searchParams]);
+  }, [hasExecutingScan, router, searchParams, onRefresh]);
 
   return null;
 }
diff --git a/ui/components/ui/breadcrumbs/breadcrumb-navigation.tsx b/ui/components/ui/breadcrumbs/breadcrumb-navigation.tsx
index cbc46c6d25..02740bae7a 100644
--- a/ui/components/ui/breadcrumbs/breadcrumb-navigation.tsx
+++ b/ui/components/ui/breadcrumbs/breadcrumb-navigation.tsx
@@ -54,6 +54,7 @@ export function BreadcrumbNavigation({
     "/manage-groups": "lucide:users-2",
     "/services": "lucide:server",
     "/workloads": "lucide:layers",
+    "/attack-paths": "lucide:git-branch",
   };
 
   const pathSegments = pathname
@@ -156,6 +157,7 @@
             >
               {breadcrumb.icon && typeof breadcrumb.icon === "string" ? (